Dec 08 00:07:23 crc systemd[1]: Starting Kubernetes Kubelet... Dec 08 00:07:23 crc restorecon[4705]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 00:07:23 crc 
restorecon[4705]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 08 00:07:23 crc 
restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:23 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc 
restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc 
restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 00:07:24 
crc restorecon[4705]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 
00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 
00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 08 00:07:24 crc 
restorecon[4705]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 
00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 
00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc 
restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 00:07:24 crc restorecon[4705]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 08 00:07:24 crc restorecon[4705]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 08 00:07:24 crc kubenswrapper[4745]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 08 00:07:24 crc kubenswrapper[4745]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 08 00:07:24 crc kubenswrapper[4745]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 08 00:07:24 crc kubenswrapper[4745]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 08 00:07:24 crc kubenswrapper[4745]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 08 00:07:24 crc kubenswrapper[4745]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.686302 4745 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689255 4745 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689275 4745 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689280 4745 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689284 4745 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689288 4745 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689292 4745 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689296 4745 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689299 4745 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689304 4745 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689308 4745 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689312 4745 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689317 4745 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689322 4745 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689326 4745 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689331 4745 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689335 4745 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689340 4745 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689344 4745 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689349 4745 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689353 4745 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689357 4745 
feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689361 4745 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689365 4745 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689368 4745 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689374 4745 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689378 4745 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689382 4745 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689386 4745 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689389 4745 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689393 4745 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689396 4745 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689400 4745 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689403 4745 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689408 4745 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689413 4745 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689417 4745 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689421 4745 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689425 4745 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689429 4745 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689433 4745 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689436 4745 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689440 4745 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689443 4745 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689447 4745 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689451 4745 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689454 4745 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689457 4745 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689461 4745 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689464 4745 feature_gate.go:330] unrecognized feature gate: Example Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689468 4745 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689471 4745 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689475 4745 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689479 4745 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689482 4745 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689487 4745 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689492 4745 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689496 4745 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689500 4745 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689504 4745 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689507 4745 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689511 4745 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689515 4745 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689518 4745 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689522 4745 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689525 4745 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689528 4745 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689532 4745 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689536 4745 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689541 4745 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689544 4745 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.689548 4745 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689621 4745 flags.go:64] FLAG: --address="0.0.0.0" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689629 4745 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689637 4745 flags.go:64] FLAG: --anonymous-auth="true" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689643 4745 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689650 4745 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689654 4745 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689659 4745 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689665 4745 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689670 4745 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689674 4745 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689678 4745 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689683 4745 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689687 4745 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689692 4745 flags.go:64] FLAG: --cgroup-root="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689697 4745 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689701 4745 flags.go:64] FLAG: --client-ca-file="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689705 4745 flags.go:64] FLAG: --cloud-config="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689709 4745 flags.go:64] FLAG: --cloud-provider="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689713 4745 flags.go:64] FLAG: --cluster-dns="[]" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689718 4745 flags.go:64] FLAG: --cluster-domain="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689722 4745 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689726 4745 flags.go:64] FLAG: --config-dir="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689730 4745 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689735 4745 flags.go:64] FLAG: --container-log-max-files="5" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689740 4745 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689744 4745 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" 
Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689749 4745 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689754 4745 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689758 4745 flags.go:64] FLAG: --contention-profiling="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689762 4745 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689766 4745 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689770 4745 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689774 4745 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689779 4745 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689784 4745 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689788 4745 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689792 4745 flags.go:64] FLAG: --enable-load-reader="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689796 4745 flags.go:64] FLAG: --enable-server="true" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689801 4745 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689806 4745 flags.go:64] FLAG: --event-burst="100" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689811 4745 flags.go:64] FLAG: --event-qps="50" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689815 4745 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689819 4745 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689823 4745 flags.go:64] FLAG: --eviction-hard="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689829 4745 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689833 4745 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689837 4745 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689842 4745 flags.go:64] FLAG: --eviction-soft="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689846 4745 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689850 4745 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689854 4745 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689858 4745 flags.go:64] FLAG: --experimental-mounter-path="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689862 4745 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689866 4745 flags.go:64] FLAG: --fail-swap-on="true" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689870 4745 flags.go:64] FLAG: --feature-gates="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689876 4745 flags.go:64] FLAG: --file-check-frequency="20s" Dec 08 
00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689880 4745 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689884 4745 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689888 4745 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689892 4745 flags.go:64] FLAG: --healthz-port="10248" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689896 4745 flags.go:64] FLAG: --help="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689900 4745 flags.go:64] FLAG: --hostname-override="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689904 4745 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689908 4745 flags.go:64] FLAG: --http-check-frequency="20s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689912 4745 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689916 4745 flags.go:64] FLAG: --image-credential-provider-config="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689934 4745 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689938 4745 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689942 4745 flags.go:64] FLAG: --image-service-endpoint="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689946 4745 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689950 4745 flags.go:64] FLAG: --kube-api-burst="100" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689954 4745 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689959 4745 flags.go:64] FLAG: --kube-api-qps="50" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689963 4745 flags.go:64] FLAG: --kube-reserved="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689967 4745 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689971 4745 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689976 4745 flags.go:64] FLAG: --kubelet-cgroups="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689980 4745 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689984 4745 flags.go:64] FLAG: --lock-file="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689988 4745 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689992 4745 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.689996 4745 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690002 4745 flags.go:64] FLAG: --log-json-split-stream="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690007 4745 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690011 4745 flags.go:64] FLAG: --log-text-split-stream="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690015 4745 flags.go:64] FLAG: --logging-format="text" Dec 08 00:07:24 crc kubenswrapper[4745]: 
I1208 00:07:24.690019 4745 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690023 4745 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690027 4745 flags.go:64] FLAG: --manifest-url="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690031 4745 flags.go:64] FLAG: --manifest-url-header="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690036 4745 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690040 4745 flags.go:64] FLAG: --max-open-files="1000000" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690045 4745 flags.go:64] FLAG: --max-pods="110" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690049 4745 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690054 4745 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690057 4745 flags.go:64] FLAG: --memory-manager-policy="None" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690061 4745 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690065 4745 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690070 4745 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690074 4745 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690083 4745 flags.go:64] FLAG: --node-status-max-images="50" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690087 4745 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690091 4745 flags.go:64] FLAG: --oom-score-adj="-999" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690095 4745 flags.go:64] FLAG: --pod-cidr="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690099 4745 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690105 4745 flags.go:64] FLAG: --pod-manifest-path="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690109 4745 flags.go:64] FLAG: --pod-max-pids="-1" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690113 4745 flags.go:64] FLAG: --pods-per-core="0" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690117 4745 flags.go:64] FLAG: --port="10250" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690121 4745 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690128 4745 flags.go:64] FLAG: --provider-id="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690132 4745 flags.go:64] FLAG: --qos-reserved="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690136 4745 flags.go:64] FLAG: --read-only-port="10255" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690141 4745 flags.go:64] FLAG: --register-node="true" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690144 4745 flags.go:64] FLAG: --register-schedulable="true" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 
00:07:24.690148 4745 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690156 4745 flags.go:64] FLAG: --registry-burst="10" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690160 4745 flags.go:64] FLAG: --registry-qps="5" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690164 4745 flags.go:64] FLAG: --reserved-cpus="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690168 4745 flags.go:64] FLAG: --reserved-memory="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690173 4745 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690177 4745 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690181 4745 flags.go:64] FLAG: --rotate-certificates="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690185 4745 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690189 4745 flags.go:64] FLAG: --runonce="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690193 4745 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690197 4745 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690201 4745 flags.go:64] FLAG: --seccomp-default="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690205 4745 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690209 4745 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690214 4745 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690219 4745 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690223 4745 flags.go:64] FLAG: --storage-driver-password="root" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690227 4745 flags.go:64] FLAG: --storage-driver-secure="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690231 4745 flags.go:64] FLAG: --storage-driver-table="stats" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690235 4745 flags.go:64] FLAG: --storage-driver-user="root" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690239 4745 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690243 4745 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690247 4745 flags.go:64] FLAG: --system-cgroups="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690251 4745 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690258 4745 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690265 4745 flags.go:64] FLAG: --tls-cert-file="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690269 4745 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690274 4745 flags.go:64] FLAG: --tls-min-version="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690278 4745 flags.go:64] FLAG: --tls-private-key-file="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 
00:07:24.690282 4745 flags.go:64] FLAG: --topology-manager-policy="none" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690286 4745 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690290 4745 flags.go:64] FLAG: --topology-manager-scope="container" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690294 4745 flags.go:64] FLAG: --v="2" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690300 4745 flags.go:64] FLAG: --version="false" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690305 4745 flags.go:64] FLAG: --vmodule="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690310 4745 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690314 4745 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690412 4745 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690417 4745 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690421 4745 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690426 4745 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690430 4745 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690435 4745 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690439 4745 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690442 4745 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690446 4745 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690450 4745 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690454 4745 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690457 4745 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690461 4745 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690464 4745 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690468 4745 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690472 4745 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690477 4745 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690481 4745 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690484 4745 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690488 4745 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690493 4745 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690497 4745 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690501 4745 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690504 4745 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690508 4745 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690511 4745 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690516 4745 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690519 4745 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690523 4745 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690527 4745 feature_gate.go:330] unrecognized feature gate: Example Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690530 4745 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690534 4745 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690537 4745 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690542 4745 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690546 4745 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690551 4745 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690554 4745 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690558 4745 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690562 4745 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690566 4745 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690569 4745 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690573 4745 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690576 4745 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690580 4745 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690583 4745 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690587 4745 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690590 4745 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690594 4745 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690597 4745 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690601 4745 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690604 4745 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690608 4745 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690611 4745 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690616 4745 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690620 4745 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690624 4745 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690628 4745 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690632 4745 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690636 4745 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690639 4745 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690643 4745 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690646 4745 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690649 4745 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690653 4745 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690656 4745 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690660 4745 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690665 4745 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690669 4745 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690672 4745 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690676 4745 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.690679 4745 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.690685 4745 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.702742 4745 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.702772 4745 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702842 4745 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702849 4745 feature_gate.go:330] unrecognized feature gate: Example Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702854 4745 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702858 4745 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702863 4745 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702868 4745 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702873 4745 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702877 4745 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702882 4745 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702885 4745 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702889 4745 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702893 4745 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702897 4745 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702901 4745 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702905 4745 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702909 4745 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702914 4745 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702920 4745 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702939 4745 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702945 4745 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702949 4745 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702954 4745 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702958 4745 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702961 4745 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702965 4745 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702968 4745 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702971 4745 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702976 4745 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702980 4745 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702984 4745 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702990 4745 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702994 4745 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.702998 4745 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703002 4745 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703005 4745 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703009 4745 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703013 4745 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703017 4745 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703020 4745 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703024 4745 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703028 4745 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703032 4745 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703036 4745 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703040 4745 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703044 4745 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703047 4745 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703051 4745 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703054 4745 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703057 4745 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703061 4745 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703065 4745 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703068 4745 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703071 4745 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703075 4745 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703078 4745 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703082 4745 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703085 4745 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703089 4745 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703092 4745 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703096 4745 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703101 4745 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703105 4745 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703109 4745 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703113 4745 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703116 4745 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703120 4745 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703123 4745 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703127 4745 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703130 4745 feature_gate.go:330] unrecognized 
feature gate: MetricsCollectionProfiles Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703134 4745 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703138 4745 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.703144 4745 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703510 4745 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703611 4745 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703623 4745 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703636 4745 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703645 4745 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703653 4745 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703661 4745 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703669 4745 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703678 4745 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703686 4745 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703694 4745 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703702 4745 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703710 4745 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703719 4745 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703727 4745 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703739 4745 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703755 4745 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703765 4745 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703774 4745 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703782 4745 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703793 4745 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703808 4745 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703817 4745 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703825 4745 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703837 4745 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703846 4745 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703854 4745 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703862 4745 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703869 4745 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703877 4745 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703887 4745 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703896 4745 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703904 4745 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703911 4745 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703984 4745 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.703992 4745 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704000 4745 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704016 4745 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704024 4745 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704032 4745 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704043 4745 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 08 00:07:24 
crc kubenswrapper[4745]: W1208 00:07:24.704053 4745 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704063 4745 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704072 4745 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704087 4745 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704098 4745 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704109 4745 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704119 4745 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704130 4745 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704141 4745 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704152 4745 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704162 4745 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704172 4745 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704186 4745 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704196 4745 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704205 4745 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704214 4745 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704224 4745 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704232 4745 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704240 4745 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704248 4745 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704255 4745 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704263 4745 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704272 4745 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704279 4745 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704292 4745 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704302 4745 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704311 4745 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704323 4745 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704334 4745 feature_gate.go:330] unrecognized feature gate: Example Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.704345 4745 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.704363 4745 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.704822 4745 server.go:940] "Client rotation is on, will bootstrap in background" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.709987 4745 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.710197 4745 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.711171 4745 server.go:997] "Starting client certificate rotation" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.711232 4745 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.711824 4745 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-25 14:07:39.419023355 +0000 UTC Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.712066 4745 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.719616 4745 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 08 00:07:24 crc kubenswrapper[4745]: E1208 00:07:24.721816 4745 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.723257 4745 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.738103 4745 log.go:25] "Validated CRI v1 runtime API" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.752125 4745 log.go:25] "Validated CRI v1 image API" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 
00:07:24.754538 4745 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.758328 4745 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-08-00-02-06-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.758374 4745 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.786758 4745 manager.go:217] Machine: {Timestamp:2025-12-08 00:07:24.784450953 +0000 UTC m=+0.213657333 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:9432629a-1315-4cc4-898c-8395e23ff1ce BootID:14c0f51d-529f-4632-8014-1290968372b9 Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:d5:69:7c Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:d5:69:7c Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:2f:13:fb Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:30:de:15 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:80:82:98 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:d0:63:d3 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:66:57:6a:f9:70:05 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:76:a9:9b:25:7c:3e Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 
Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.788506 4745 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.788957 4745 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.789966 4745 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.790353 4745 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.790410 4745 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.790955 4745 topology_manager.go:138] "Creating topology manager with none policy" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.790989 4745 container_manager_linux.go:303] "Creating device plugin manager" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.791433 4745 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.791510 4745 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.791843 4745 state_mem.go:36] "Initialized new in-memory state store" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.792565 4745 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.793736 4745 kubelet.go:418] "Attempting to sync node with API server" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.793776 4745 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" 
Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.793819 4745 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.793841 4745 kubelet.go:324] "Adding apiserver pod source" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.793862 4745 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.796453 4745 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.796564 4745 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.796565 4745 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Dec 08 00:07:24 crc kubenswrapper[4745]: E1208 00:07:24.796707 4745 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError" Dec 08 00:07:24 crc kubenswrapper[4745]: E1208 00:07:24.796716 4745 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.797082 4745 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.798228 4745 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.799151 4745 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.799195 4745 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.799211 4745 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.799226 4745 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.799248 4745 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.799263 4745 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.799276 4745 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.799300 4745 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.799317 4745 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.799333 4745 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.799352 4745 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.799365 4745 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.800026 4745 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.800755 4745 server.go:1280] "Started kubelet" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.801181 4745 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.801204 4745 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.801189 4745 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.802115 4745 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 08 00:07:24 crc systemd[1]: Started Kubernetes Kubelet. 
Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.803899 4745 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.804015 4745 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.804367 4745 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 19:42:39.94171962 +0000 UTC Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.804422 4745 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 931h35m15.13730185s for next certificate rotation Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.804525 4745 server.go:460] "Adding debug handlers to kubelet server" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.804588 4745 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.804616 4745 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.804853 4745 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 08 00:07:24 crc kubenswrapper[4745]: E1208 00:07:24.805180 4745 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.812059 4745 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Dec 08 00:07:24 crc kubenswrapper[4745]: E1208 00:07:24.812208 4745 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError" Dec 08 00:07:24 crc kubenswrapper[4745]: E1208 00:07:24.812488 4745 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="200ms" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.813883 4745 factory.go:55] Registering systemd factory Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.813951 4745 factory.go:221] Registration of the systemd container factory successfully Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.814832 4745 factory.go:153] Registering CRI-O factory Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.814874 4745 factory.go:221] Registration of the crio container factory successfully Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.815014 4745 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.815046 4745 factory.go:103] Registering Raw factory Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.815074 4745 manager.go:1196] Started watching for new ooms in manager Dec 08 00:07:24 crc 
kubenswrapper[4745]: E1208 00:07:24.815429 4745 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.201:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187f14cbdec21931 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-08 00:07:24.800702769 +0000 UTC m=+0.229909099,LastTimestamp:2025-12-08 00:07:24.800702769 +0000 UTC m=+0.229909099,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.817118 4745 manager.go:319] Starting recovery of all containers Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.829969 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830068 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830092 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830114 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830134 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830154 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830173 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830190 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 08 00:07:24 crc 
kubenswrapper[4745]: I1208 00:07:24.830212 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830233 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830252 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830272 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830292 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830316 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830335 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830352 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830371 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830401 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830418 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830464 4745 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830483 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830500 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830522 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830542 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830561 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830580 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830651 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830674 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830694 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830711 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830728 4745 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830747 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830769 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830789 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830808 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830829 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830846 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830864 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830883 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.830900 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831052 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831077 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831096 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831113 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831137 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831156 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831173 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831192 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831213 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831230 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831249 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831268 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831292 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831312 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831333 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831352 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831372 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831393 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831409 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831427 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831447 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831469 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831486 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831506 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" 
volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831524 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831542 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831562 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831580 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831597 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831616 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831635 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831652 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831669 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831690 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831706 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831726 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831749 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831819 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831871 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831892 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831916 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.831981 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832009 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832027 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832044 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832062 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832081 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832100 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832121 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832140 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832158 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832176 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832196 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832213 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832230 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832248 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832267 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832285 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832301 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832320 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832339 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832356 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832372 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832389 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832415 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832435 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832453 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832472 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" 
volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832494 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832513 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832532 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832552 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832571 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832589 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832614 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832631 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832649 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832667 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832716 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832733 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832750 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832767 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832784 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832802 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832822 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832841 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832859 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832880 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832899 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832917 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.832962 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833013 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833034 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833055 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833074 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833091 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833112 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833132 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833150 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833171 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833189 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" 
volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833208 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833225 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833243 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833261 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833277 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833296 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833313 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833332 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833352 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833369 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833444 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833463 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833483 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833501 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833518 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833538 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833556 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833575 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833594 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833612 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833630 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833650 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833669 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833686 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833705 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833725 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833743 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833760 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833778 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833818 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.833844 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.834759 4745 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.834816 4745 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.834838 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.834867 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.834890 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.834908 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.834963 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.834991 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835011 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835028 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835048 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835068 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835090 4745 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835108 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835128 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835146 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835167 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835187 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835205 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835222 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835241 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835262 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835281 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835300 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835318 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835338 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835356 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835373 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835396 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835413 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835432 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835451 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835470 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835488 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835505 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835523 4745 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835540 4745 reconstruct.go:97] "Volume reconstruction finished" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.835552 4745 reconciler.go:26] "Reconciler: start to sync state" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.850732 4745 manager.go:324] Recovery completed Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.873546 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.876109 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.876157 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.876168 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.877313 4745 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.877384 4745 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.877410 4745 state_mem.go:36] "Initialized new in-memory state store" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.879061 4745 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.881267 4745 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.881341 4745 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.881381 4745 kubelet.go:2335] "Starting kubelet main sync loop" Dec 08 00:07:24 crc kubenswrapper[4745]: E1208 00:07:24.881465 4745 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 08 00:07:24 crc kubenswrapper[4745]: W1208 00:07:24.882662 4745 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Dec 08 00:07:24 crc kubenswrapper[4745]: E1208 00:07:24.882719 4745 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.892056 4745 policy_none.go:49] "None policy: Start" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.892870 4745 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.892901 4745 state_mem.go:35] "Initializing new in-memory state store" Dec 08 00:07:24 crc kubenswrapper[4745]: E1208 00:07:24.909131 4745 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.946015 4745 manager.go:334] "Starting Device Plugin manager" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.946095 4745 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.946117 4745 server.go:79] "Starting device plugin registration server" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.946690 4745 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.946721 4745 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.947503 4745 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.947611 4745 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.947620 4745 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 08 00:07:24 crc kubenswrapper[4745]: E1208 00:07:24.961058 4745 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.981953 4745 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 08 00:07:24 crc kubenswrapper[4745]: 
I1208 00:07:24.982067 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.983277 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.983324 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.983337 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.983507 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.983629 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.983661 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.984550 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.984573 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.984597 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.984654 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.984698 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.984716 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.984729 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.985140 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.985182 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.985550 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.985590 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.985606 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.985797 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.985817 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.985825 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.985963 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.985798 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.986007 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.986945 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.986985 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.986999 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.987166 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.987294 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.987354 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.987828 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.987871 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.987908 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.987919 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.987958 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.987966 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.988154 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.988202 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.988277 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.988315 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.988331 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.988956 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.988988 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:24 crc kubenswrapper[4745]: I1208 00:07:24.988999 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:25 crc kubenswrapper[4745]: E1208 00:07:25.013758 4745 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="400ms" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.037998 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.038052 4745 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.038090 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.038121 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.038294 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.038342 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.038376 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.038409 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.038460 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.038497 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.038528 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.038561 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.038592 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.038636 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.038673 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.047394 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.048608 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.048650 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.048667 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.048701 4745 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 00:07:25 crc kubenswrapper[4745]: E1208 00:07:25.049197 4745 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.201:6443: connect: connection refused" node="crc" Dec 08 00:07:25 crc kubenswrapper[4745]: E1208 00:07:25.074012 4745 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.201:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187f14cbdec21931 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-08 00:07:24.800702769 +0000 UTC m=+0.229909099,LastTimestamp:2025-12-08 00:07:24.800702769 +0000 UTC 
m=+0.229909099,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.139504 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.139566 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.139607 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.139641 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.139673 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.139703 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.139732 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.139765 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.139795 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.139855 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" 
(UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.139888 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.139889 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.139979 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.140023 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.140027 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.139962 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.139894 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.140099 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.140146 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.140258 4745 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.140275 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.140300 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.140292 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.140337 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.140360 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.140411 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.140411 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.140446 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.140467 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 
00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.140532 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.250344 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.251831 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.251883 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.251900 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.251962 4745 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 00:07:25 crc kubenswrapper[4745]: E1208 00:07:25.252496 4745 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.201:6443: connect: connection refused" node="crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.312325 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.330509 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: W1208 00:07:25.347638 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-cebeac7baccf90743816677ccd214caf72105949f98948c9e5bd42b7d69f6e2e WatchSource:0}: Error finding container cebeac7baccf90743816677ccd214caf72105949f98948c9e5bd42b7d69f6e2e: Status 404 returned error can't find the container with id cebeac7baccf90743816677ccd214caf72105949f98948c9e5bd42b7d69f6e2e Dec 08 00:07:25 crc kubenswrapper[4745]: W1208 00:07:25.354858 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-ac46ebfc4dcdfde2f0e94ef2cd18fa7c6afc019e603f16dd3e6d4529de362ea0 WatchSource:0}: Error finding container ac46ebfc4dcdfde2f0e94ef2cd18fa7c6afc019e603f16dd3e6d4529de362ea0: Status 404 returned error can't find the container with id ac46ebfc4dcdfde2f0e94ef2cd18fa7c6afc019e603f16dd3e6d4529de362ea0 Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.355049 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.366768 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: W1208 00:07:25.370096 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-faa40305a7f1fe8ec7068f010bf1a3fd24a8168033f1955bd74568be41c00a89 WatchSource:0}: Error finding container faa40305a7f1fe8ec7068f010bf1a3fd24a8168033f1955bd74568be41c00a89: Status 404 returned error can't find the container with id faa40305a7f1fe8ec7068f010bf1a3fd24a8168033f1955bd74568be41c00a89 Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.371822 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 00:07:25 crc kubenswrapper[4745]: W1208 00:07:25.403967 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-a3a3a7c1866c9979c8127cdaf0686a843200c1f11d2d388c62e1cd2164b8d7f5 WatchSource:0}: Error finding container a3a3a7c1866c9979c8127cdaf0686a843200c1f11d2d388c62e1cd2164b8d7f5: Status 404 returned error can't find the container with id a3a3a7c1866c9979c8127cdaf0686a843200c1f11d2d388c62e1cd2164b8d7f5 Dec 08 00:07:25 crc kubenswrapper[4745]: W1208 00:07:25.412833 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-66d8fa11859fd861e56a64c1ae2a4ee7438ec29f3ab83015414f1f8ba1b65156 WatchSource:0}: Error finding container 66d8fa11859fd861e56a64c1ae2a4ee7438ec29f3ab83015414f1f8ba1b65156: Status 404 returned error can't find the container with id 66d8fa11859fd861e56a64c1ae2a4ee7438ec29f3ab83015414f1f8ba1b65156 Dec 08 00:07:25 crc kubenswrapper[4745]: E1208 00:07:25.414513 4745 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="800ms" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.652647 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.654670 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.654705 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.654716 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.654738 4745 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 00:07:25 crc kubenswrapper[4745]: E1208 00:07:25.655117 4745 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.201:6443: connect: connection refused" node="crc" Dec 08 00:07:25 crc kubenswrapper[4745]: W1208 00:07:25.666499 4745 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get 
"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Dec 08 00:07:25 crc kubenswrapper[4745]: E1208 00:07:25.666553 4745 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.802576 4745 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Dec 08 00:07:25 crc kubenswrapper[4745]: W1208 00:07:25.872635 4745 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Dec 08 00:07:25 crc kubenswrapper[4745]: E1208 00:07:25.872790 4745 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.887795 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3"} Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.888033 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.887751 4745 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3" exitCode=0 Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.888151 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ac46ebfc4dcdfde2f0e94ef2cd18fa7c6afc019e603f16dd3e6d4529de362ea0"} Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.889237 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.889276 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.889288 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.891419 4745 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422" exitCode=0 Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.891477 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422"} Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.891500 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"cebeac7baccf90743816677ccd214caf72105949f98948c9e5bd42b7d69f6e2e"} Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.891573 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.892385 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.892423 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.892440 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.893890 4745 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933" exitCode=0 Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.893919 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933"} Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.893975 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"66d8fa11859fd861e56a64c1ae2a4ee7438ec29f3ab83015414f1f8ba1b65156"} Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.894070 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.895004 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.895030 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.895041 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.898698 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb"} Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.898775 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a3a3a7c1866c9979c8127cdaf0686a843200c1f11d2d388c62e1cd2164b8d7f5"} Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.902344 4745 
generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448" exitCode=0 Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.902410 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448"} Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.902448 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"faa40305a7f1fe8ec7068f010bf1a3fd24a8168033f1955bd74568be41c00a89"} Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.902544 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.904374 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.904414 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.904429 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.911610 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.912293 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.912330 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:25 crc kubenswrapper[4745]: I1208 00:07:25.912343 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:25 crc kubenswrapper[4745]: W1208 00:07:25.935916 4745 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Dec 08 00:07:25 crc kubenswrapper[4745]: E1208 00:07:25.936105 4745 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError" Dec 08 00:07:26 crc kubenswrapper[4745]: W1208 00:07:26.174592 4745 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Dec 08 00:07:26 crc kubenswrapper[4745]: E1208 00:07:26.174733 4745 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 
38.102.83.201:6443: connect: connection refused" logger="UnhandledError" Dec 08 00:07:26 crc kubenswrapper[4745]: E1208 00:07:26.216034 4745 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="1.6s" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.455419 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.456759 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.456804 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.456838 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.456880 4745 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.902206 4745 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.911059 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118"} Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.911103 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11"} Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.911118 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002"} Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.911130 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6"} Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.913006 4745 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0" exitCode=0 Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.913068 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0"} Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.913093 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.913980 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:26 crc 
kubenswrapper[4745]: I1208 00:07:26.914009 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.914020 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.918641 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"fb0ed0a3b1dcb4a78338dfc9a515ca7826b7f99c44f131c843981790fd3de6ac"} Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.918765 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.919743 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.919769 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.919778 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.921496 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"536fd2adc6545b10655670915582424eb19c0b75d002e574953b3b6db6260bfe"} Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.921528 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c6a3e7e941656fd7783871bff0012816c119487be1f52f7120fa55f4db219964"} Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.921543 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"8bb198c5bd72a1650c0d7f4740d5e7de4ca13d52239ce5b9faa5ab197fcd581b"} Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.921634 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.922653 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.922699 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.922718 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.924763 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189"} Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.924798 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113"} Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.924812 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a"} Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.924865 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.925898 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.925942 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:26 crc kubenswrapper[4745]: I1208 00:07:26.925954 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:27 crc kubenswrapper[4745]: I1208 00:07:27.931799 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12"} Dec 08 00:07:27 crc kubenswrapper[4745]: I1208 00:07:27.931906 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:27 crc kubenswrapper[4745]: I1208 00:07:27.933378 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:27 crc kubenswrapper[4745]: I1208 00:07:27.933412 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:27 crc kubenswrapper[4745]: I1208 00:07:27.933428 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:27 crc kubenswrapper[4745]: I1208 00:07:27.935381 4745 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d" exitCode=0 Dec 08 00:07:27 crc kubenswrapper[4745]: I1208 00:07:27.935481 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d"} Dec 08 00:07:27 crc kubenswrapper[4745]: I1208 00:07:27.935514 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:27 crc kubenswrapper[4745]: I1208 00:07:27.935563 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:27 crc kubenswrapper[4745]: I1208 00:07:27.937218 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:27 crc kubenswrapper[4745]: I1208 00:07:27.937270 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:27 crc kubenswrapper[4745]: I1208 00:07:27.937281 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" 
Dec 08 00:07:27 crc kubenswrapper[4745]: I1208 00:07:27.937289 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:27 crc kubenswrapper[4745]: I1208 00:07:27.937318 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:27 crc kubenswrapper[4745]: I1208 00:07:27.937336 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.667035 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.667257 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.668573 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.668650 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.668669 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.864990 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.942917 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e"} Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.943004 4745 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.943074 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.943083 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.943004 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6"} Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.943713 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31"} Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.944413 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.944448 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.944458 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.944478 4745 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.944510 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:28 crc kubenswrapper[4745]: I1208 00:07:28.944526 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:29 crc kubenswrapper[4745]: I1208 00:07:29.951814 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc11e4e26e0f50104ecf6d7b509"} Dec 08 00:07:29 crc kubenswrapper[4745]: I1208 00:07:29.951885 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9"} Dec 08 00:07:29 crc kubenswrapper[4745]: I1208 00:07:29.952035 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:29 crc kubenswrapper[4745]: I1208 00:07:29.953246 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:29 crc kubenswrapper[4745]: I1208 00:07:29.953312 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:29 crc kubenswrapper[4745]: I1208 00:07:29.953332 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:30 crc kubenswrapper[4745]: I1208 00:07:30.088329 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:07:30 crc kubenswrapper[4745]: I1208 00:07:30.088538 4745 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 00:07:30 crc kubenswrapper[4745]: I1208 00:07:30.088595 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:30 crc kubenswrapper[4745]: I1208 00:07:30.090239 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:30 crc kubenswrapper[4745]: I1208 00:07:30.090304 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:30 crc kubenswrapper[4745]: I1208 00:07:30.090329 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:30 crc kubenswrapper[4745]: I1208 00:07:30.366873 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:07:30 crc kubenswrapper[4745]: I1208 00:07:30.367175 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:30 crc kubenswrapper[4745]: I1208 00:07:30.368970 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:30 crc kubenswrapper[4745]: I1208 00:07:30.369028 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:30 crc kubenswrapper[4745]: I1208 00:07:30.369051 4745 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:30 crc kubenswrapper[4745]: I1208 00:07:30.842830 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 08 00:07:30 crc kubenswrapper[4745]: I1208 00:07:30.955144 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:30 crc kubenswrapper[4745]: I1208 00:07:30.956495 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:30 crc kubenswrapper[4745]: I1208 00:07:30.956541 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:30 crc kubenswrapper[4745]: I1208 00:07:30.956558 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.275280 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.275447 4745 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.275499 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.276764 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.276824 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.276837 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.699487 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.865776 4745 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.865884 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.957490 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.957570 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.958907 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.958979 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.958993 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.959220 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.959279 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:31 crc kubenswrapper[4745]: I1208 00:07:31.959336 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:33 crc kubenswrapper[4745]: I1208 00:07:33.284769 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:07:33 crc kubenswrapper[4745]: I1208 00:07:33.285331 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:33 crc kubenswrapper[4745]: I1208 00:07:33.287646 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:33 crc kubenswrapper[4745]: I1208 00:07:33.287715 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:33 crc kubenswrapper[4745]: I1208 00:07:33.287739 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:33 crc kubenswrapper[4745]: I1208 00:07:33.291311 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:07:33 crc kubenswrapper[4745]: I1208 00:07:33.962116 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:33 crc kubenswrapper[4745]: I1208 00:07:33.963819 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:33 crc kubenswrapper[4745]: I1208 00:07:33.963857 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:33 crc kubenswrapper[4745]: I1208 00:07:33.963867 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:34 crc kubenswrapper[4745]: I1208 00:07:34.165059 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:07:34 crc kubenswrapper[4745]: E1208 00:07:34.962182 4745 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 08 00:07:34 crc kubenswrapper[4745]: I1208 00:07:34.964767 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:34 crc kubenswrapper[4745]: I1208 00:07:34.966586 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:34 crc kubenswrapper[4745]: I1208 00:07:34.966729 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:34 crc kubenswrapper[4745]: I1208 00:07:34.966870 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 08 00:07:36 crc kubenswrapper[4745]: I1208 00:07:36.334126 4745 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 08 00:07:36 crc kubenswrapper[4745]: I1208 00:07:36.334217 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 08 00:07:36 crc kubenswrapper[4745]: E1208 00:07:36.458291 4745 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 08 00:07:36 crc kubenswrapper[4745]: I1208 00:07:36.803216 4745 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 08 00:07:36 crc kubenswrapper[4745]: E1208 00:07:36.904327 4745 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 08 00:07:37 crc kubenswrapper[4745]: I1208 00:07:37.714956 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 08 00:07:37 crc kubenswrapper[4745]: I1208 00:07:37.715150 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:37 crc kubenswrapper[4745]: I1208 00:07:37.716285 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:37 crc kubenswrapper[4745]: I1208 00:07:37.716321 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:37 crc kubenswrapper[4745]: I1208 00:07:37.716332 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:37 crc kubenswrapper[4745]: E1208 00:07:37.817916 4745 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 08 00:07:38 crc kubenswrapper[4745]: I1208 00:07:38.058729 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:38 crc kubenswrapper[4745]: I1208 00:07:38.060601 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:38 crc kubenswrapper[4745]: I1208 00:07:38.060667 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:38 crc kubenswrapper[4745]: I1208 00:07:38.060680 4745 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:38 crc kubenswrapper[4745]: I1208 00:07:38.060751 4745 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 00:07:38 crc kubenswrapper[4745]: I1208 00:07:38.247040 4745 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 08 00:07:38 crc kubenswrapper[4745]: I1208 00:07:38.247120 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 08 00:07:38 crc kubenswrapper[4745]: I1208 00:07:38.252592 4745 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 08 00:07:38 crc kubenswrapper[4745]: I1208 00:07:38.252668 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 08 00:07:40 crc kubenswrapper[4745]: I1208 00:07:40.098432 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:07:40 crc kubenswrapper[4745]: I1208 00:07:40.098668 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:40 crc kubenswrapper[4745]: I1208 00:07:40.100276 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:40 crc kubenswrapper[4745]: I1208 00:07:40.100364 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:40 crc kubenswrapper[4745]: I1208 00:07:40.100394 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:40 crc kubenswrapper[4745]: I1208 00:07:40.106146 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:07:40 crc kubenswrapper[4745]: I1208 00:07:40.982427 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:40 crc kubenswrapper[4745]: I1208 00:07:40.983729 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:40 crc kubenswrapper[4745]: I1208 00:07:40.983793 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:40 crc kubenswrapper[4745]: I1208 00:07:40.983817 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:41 crc 
kubenswrapper[4745]: I1208 00:07:41.238484 4745 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 08 00:07:41 crc kubenswrapper[4745]: I1208 00:07:41.255369 4745 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 08 00:07:41 crc kubenswrapper[4745]: I1208 00:07:41.866597 4745 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 08 00:07:41 crc kubenswrapper[4745]: I1208 00:07:41.866786 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.244394 4745 trace.go:236] Trace[1932361500]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (08-Dec-2025 00:07:28.646) (total time: 14597ms): Dec 08 00:07:43 crc kubenswrapper[4745]: Trace[1932361500]: ---"Objects listed" error: 14597ms (00:07:43.244) Dec 08 00:07:43 crc kubenswrapper[4745]: Trace[1932361500]: [14.597337456s] [14.597337456s] END Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.245364 4745 trace.go:236] Trace[959521326]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (08-Dec-2025 00:07:28.894) (total time: 14350ms): Dec 08 00:07:43 crc kubenswrapper[4745]: Trace[959521326]: ---"Objects listed" error: 14350ms (00:07:43.245) Dec 08 00:07:43 crc kubenswrapper[4745]: Trace[959521326]: [14.350318375s] [14.350318375s] END Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.245434 4745 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.245385 4745 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.244537 4745 trace.go:236] Trace[1368177551]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (08-Dec-2025 00:07:28.597) (total time: 14646ms): Dec 08 00:07:43 crc kubenswrapper[4745]: Trace[1368177551]: ---"Objects listed" error: 14646ms (00:07:43.244) Dec 08 00:07:43 crc kubenswrapper[4745]: Trace[1368177551]: [14.646560684s] [14.646560684s] END Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.245602 4745 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.246442 4745 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.247127 4745 trace.go:236] Trace[1579471398]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (08-Dec-2025 00:07:28.431) (total time: 14815ms): Dec 08 00:07:43 crc kubenswrapper[4745]: Trace[1579471398]: ---"Objects listed" error: 14815ms (00:07:43.246) Dec 08 00:07:43 crc kubenswrapper[4745]: Trace[1579471398]: [14.815970785s] [14.815970785s] 
END Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.247168 4745 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 08 00:07:43 crc kubenswrapper[4745]: E1208 00:07:43.248059 4745 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.275529 4745 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38870->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.275606 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38870->192.168.126.11:17697: read: connection reset by peer" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.275902 4745 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.275945 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.276096 4745 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.276116 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.805670 4745 apiserver.go:52] "Watching apiserver" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.809581 4745 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.809827 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"] Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 
00:07:43.810276 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.810390 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.810474 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.810311 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:07:43 crc kubenswrapper[4745]: E1208 00:07:43.810729 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.810807 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.810990 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:43 crc kubenswrapper[4745]: E1208 00:07:43.811046 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:07:43 crc kubenswrapper[4745]: E1208 00:07:43.811229 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.813572 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.814630 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.814631 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.814775 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.815224 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.815290 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.815856 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.816109 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.816257 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.899681 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.905960 4745 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.913448 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.928129 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.938789 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.949916 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950004 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950047 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950081 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950116 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950151 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950185 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: 
\"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950219 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950252 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950291 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950324 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950358 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950405 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950239 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950460 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950440 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950581 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950576 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950640 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950642 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950655 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950703 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950732 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950755 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950780 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950806 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950828 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950863 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950879 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950939 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950973 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950961 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.950998 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951115 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951151 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951180 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951203 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951205 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951206 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951227 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951254 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951273 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951297 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951320 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951341 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951359 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951381 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951405 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: 
\"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951425 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951448 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951467 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951487 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951511 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951530 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951572 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951593 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951615 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951638 4745 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951662 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951697 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951787 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951813 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951833 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951858 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951884 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951902 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951921 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951960 4745 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952096 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952121 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952139 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952162 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952214 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952254 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952274 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952293 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952313 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 08 00:07:43 crc kubenswrapper[4745]: 
I1208 00:07:43.952334 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952355 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952373 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952391 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952410 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952428 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952447 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952506 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952532 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952548 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 
00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952566 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952588 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952605 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952621 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952638 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952659 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952676 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952693 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952713 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952730 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 08 00:07:43 crc 
kubenswrapper[4745]: I1208 00:07:43.952747 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952765 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952785 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952801 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952818 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952837 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952857 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952877 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952894 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952916 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 08 
00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952950 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952968 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952985 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953002 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953026 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953053 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953070 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953087 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953108 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953127 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953148 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953164 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953180 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953200 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953222 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953240 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953260 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953278 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953296 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953313 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: 
\"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953332 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953351 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953370 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953389 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953409 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953429 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953449 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953468 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953485 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953503 
4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953521 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953536 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953553 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953570 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953591 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953613 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953635 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951180 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953654 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951224 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953675 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953700 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953719 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953743 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953765 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953792 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953811 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953834 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953852 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953872 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953895 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954173 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954212 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954231 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954250 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954269 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954296 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954315 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" 
(UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954332 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954349 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954369 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954386 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954406 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954426 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954448 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954468 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954487 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954504 4745 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954523 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954543 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954564 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954584 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954604 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954625 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954646 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954666 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954685 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954703 4745 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954723 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954749 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954766 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954788 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954807 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954882 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954917 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954972 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954998 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 08 00:07:43 crc 
kubenswrapper[4745]: I1208 00:07:43.955018 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955043 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955069 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955094 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955117 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955141 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955158 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955176 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955202 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955220 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 08 00:07:43 crc 
kubenswrapper[4745]: I1208 00:07:43.955239 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955264 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955284 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955347 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955378 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955401 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955423 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.956256 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.959699 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951253 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951284 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951448 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951492 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951524 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951527 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951550 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951624 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951641 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951706 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951717 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951730 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951779 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). 
InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951821 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951868 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.951952 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952005 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952060 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952111 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952124 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952173 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.952408 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953586 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953630 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953732 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953854 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.953991 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954403 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954423 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954428 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954439 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954604 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954820 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.954825 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955027 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955100 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955231 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955220 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: E1208 00:07:43.955487 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:07:44.455460221 +0000 UTC m=+19.884666531 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.960687 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.960750 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.960801 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.960846 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.960899 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.960979 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.961024 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.961070 4745 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.961115 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.961157 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.961235 4745 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.961263 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.961291 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.961316 4745 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.961341 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.961367 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.961390 4745 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.961412 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.961435 4745 reconciler_common.go:293] "Volume detached for 
volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.961461 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.961488 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.962572 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.962820 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.963186 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.963574 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.963758 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.963810 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.964046 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.964414 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.964684 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.964715 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.965078 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.965480 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.965190 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.965655 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). 
InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.965659 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.964724 4745 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.966199 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.966256 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.966259 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.966430 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.966482 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.966694 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.966710 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.966905 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.966918 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.966836 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.967029 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.967561 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.967727 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.955309 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: E1208 00:07:43.972453 4745 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 00:07:43 crc kubenswrapper[4745]: E1208 00:07:43.972530 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:44.472510014 +0000 UTC m=+19.901716314 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 00:07:43 crc kubenswrapper[4745]: E1208 00:07:43.972708 4745 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 00:07:43 crc kubenswrapper[4745]: E1208 00:07:43.972745 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:44.47273744 +0000 UTC m=+19.901943740 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.977191 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.977394 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.977449 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.977865 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.977883 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.978170 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.978266 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.978543 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.979134 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.980861 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.981211 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.981645 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.982098 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.988287 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.988665 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.988702 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.988725 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.989188 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.989330 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.989798 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.990110 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.990663 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.990778 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.991383 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: E1208 00:07:43.991950 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 00:07:43 crc kubenswrapper[4745]: E1208 00:07:43.991998 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 00:07:43 crc kubenswrapper[4745]: E1208 00:07:43.992051 4745 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:43 crc kubenswrapper[4745]: E1208 00:07:43.992245 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-08 00:07:44.492149005 +0000 UTC m=+19.921355315 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.992415 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.992673 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.995025 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.995107 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.995274 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.995403 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.995613 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.996039 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.996179 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.996774 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.996882 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.998322 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:43 crc kubenswrapper[4745]: I1208 00:07:43.998525 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:43.999937 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.000058 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.000314 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.000374 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.000381 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.000552 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.000712 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.000783 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.000943 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.001382 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.001264 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.001626 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.001713 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.002032 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.002179 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.002563 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.002909 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.003196 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.003894 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.004237 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.004275 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.004423 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.004623 4745 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12" exitCode=255 Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.004682 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12"} Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.004725 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.004753 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.004782 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.005041 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.005425 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.005527 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.005790 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.005806 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.006065 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.008743 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.009104 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.009164 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.009283 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.009510 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.009692 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.012468 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.012572 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.012825 4745 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.013092 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:44.513065689 +0000 UTC m=+19.942272189 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.013840 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.014531 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.015346 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.015580 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.015651 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.018151 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.018167 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.018265 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.014945 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.018686 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.018906 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.019556 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.019884 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.020633 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.021056 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.022471 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.023011 4745 scope.go:117] "RemoveContainer" containerID="b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.027144 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.027333 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.028109 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.029209 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.040428 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.040557 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.040645 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.040834 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.040905 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.041132 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.041293 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.041651 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). 
InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.041875 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.041958 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.042214 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.042877 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.042951 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.043023 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.043124 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.043132 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.043183 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.043701 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.043798 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.043973 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.043981 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.044543 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.044610 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.044920 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.047704 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.047785 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.047839 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.048716 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.048904 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.050128 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.051031 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.054524 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.060288 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.061272 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.073889 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.073992 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.075344 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.078602 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.078957 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.079105 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.079171 4745 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.079183 4745 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.079862 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.079887 4745 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.079898 4745 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" 
Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.079908 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.079917 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.079947 4745 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.079955 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.079967 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.079979 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080110 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080120 4745 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080129 4745 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080141 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080151 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080159 4745 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080168 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080179 4745 
reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080188 4745 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080198 4745 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080208 4745 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080528 4745 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080548 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080558 4745 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080570 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080579 4745 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080587 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080597 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080608 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080616 4745 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc 
kubenswrapper[4745]: I1208 00:07:44.080625 4745 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080633 4745 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080644 4745 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080653 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080661 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080672 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080680 4745 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080689 4745 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080697 4745 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080707 4745 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080717 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080725 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080733 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080744 4745 reconciler_common.go:293] "Volume detached 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080752 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080761 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080770 4745 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080781 4745 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080789 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080799 4745 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080811 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080821 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080839 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080855 4745 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080868 4745 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080878 4745 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080886 
4745 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080895 4745 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080907 4745 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080917 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080942 4745 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080951 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080962 4745 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080971 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.080984 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081001 4745 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081011 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081022 4745 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081032 4745 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081043 4745 reconciler_common.go:293] "Volume 
detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081052 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081061 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081071 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081081 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081090 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081098 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081109 4745 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081118 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081127 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081137 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081150 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081160 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081169 4745 reconciler_common.go:293] "Volume detached for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081178 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081190 4745 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081197 4745 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081206 4745 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081214 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081225 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081233 4745 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081241 4745 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081251 4745 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081260 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081269 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081277 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081288 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: 
\"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081297 4745 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081307 4745 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081315 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081325 4745 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081334 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081342 4745 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081352 4745 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081360 4745 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081368 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081376 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081387 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081398 4745 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081260 4745 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081406 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081611 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081628 4745 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081675 4745 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081691 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081710 4745 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081724 4745 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081736 4745 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081752 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081765 4745 reconciler_common.go:293] "Volume detached 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081779 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081793 4745 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081809 4745 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081824 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081837 4745 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081849 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081866 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081880 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081892 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081907 4745 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081920 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081950 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081962 4745 reconciler_common.go:293] "Volume 
detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081978 4745 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.081990 4745 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082002 4745 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082015 4745 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082031 4745 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082043 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082055 4745 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082066 4745 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082082 4745 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082096 4745 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082110 4745 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082126 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082138 4745 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082149 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082161 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082177 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082189 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082201 4745 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082215 4745 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082230 4745 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082242 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082254 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082266 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082283 4745 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082295 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082307 4745 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082323 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082336 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082347 4745 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082359 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082374 4745 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082385 4745 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082397 4745 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082409 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082424 4745 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082437 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082452 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082469 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082482 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: 
\"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082495 4745 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082507 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082523 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082537 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082552 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082565 4745 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082581 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082593 4745 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.082605 4745 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.088218 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.095549 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.105115 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.137740 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 08 00:07:44 crc kubenswrapper[4745]: W1208 00:07:44.148643 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-ed59e8b7828882809321369bb286b419d2a1f3752a62f2e0f998d45c95988f78 WatchSource:0}: Error finding container ed59e8b7828882809321369bb286b419d2a1f3752a62f2e0f998d45c95988f78: Status 404 returned error can't find the container with id ed59e8b7828882809321369bb286b419d2a1f3752a62f2e0f998d45c95988f78 Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.150729 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.164870 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.175862 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.183041 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.186113 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.189163 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.200392 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.211668 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08
T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.220473 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.230515 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.246391 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.258755 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.382328 4745 csr.go:261] certificate signing request csr-jsnlb is approved, waiting to be issued Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.401310 4745 csr.go:257] certificate signing request csr-jsnlb is issued Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.485128 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.485332 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:07:45.485299022 +0000 UTC m=+20.914505322 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.485354 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.485378 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.485531 4745 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.485530 4745 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.485691 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:45.485652271 +0000 UTC m=+20.914858781 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.485723 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:45.485713203 +0000 UTC m=+20.914919703 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.585967 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.586050 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.586194 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.586210 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.586221 4745 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.586286 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:45.586273718 +0000 UTC m=+21.015480018 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.586505 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.586564 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.586583 4745 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.586683 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:45.586652018 +0000 UTC m=+21.015858488 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.712841 4745 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 08 00:07:44 crc kubenswrapper[4745]: W1208 00:07:44.713050 4745 reflector.go:484] object-"openshift-network-operator"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 08 00:07:44 crc kubenswrapper[4745]: W1208 00:07:44.713169 4745 reflector.go:484] object-"openshift-network-operator"/"metrics-tls": watch of *v1.Secret ended with: very short watch: object-"openshift-network-operator"/"metrics-tls": Unexpected watch close - watch lasted less than a second and no items received Dec 08 00:07:44 crc kubenswrapper[4745]: W1208 00:07:44.713202 4745 reflector.go:484] object-"openshift-network-node-identity"/"ovnkube-identity-cm": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"ovnkube-identity-cm": Unexpected watch close - watch lasted less than a second and no items received Dec 08 00:07:44 crc kubenswrapper[4745]: W1208 00:07:44.713218 4745 reflector.go:484] object-"openshift-network-operator"/"iptables-alerter-script": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"iptables-alerter-script": Unexpected watch close - watch lasted less than a 
second and no items received Dec 08 00:07:44 crc kubenswrapper[4745]: W1208 00:07:44.713209 4745 reflector.go:484] object-"openshift-network-node-identity"/"env-overrides": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"env-overrides": Unexpected watch close - watch lasted less than a second and no items received Dec 08 00:07:44 crc kubenswrapper[4745]: W1208 00:07:44.713177 4745 reflector.go:484] object-"openshift-network-node-identity"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 08 00:07:44 crc kubenswrapper[4745]: W1208 00:07:44.713179 4745 reflector.go:484] object-"openshift-network-node-identity"/"network-node-identity-cert": watch of *v1.Secret ended with: very short watch: object-"openshift-network-node-identity"/"network-node-identity-cert": Unexpected watch close - watch lasted less than a second and no items received Dec 08 00:07:44 crc kubenswrapper[4745]: W1208 00:07:44.713250 4745 reflector.go:484] object-"openshift-network-operator"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 08 00:07:44 crc kubenswrapper[4745]: W1208 00:07:44.714008 4745 reflector.go:484] object-"openshift-network-node-identity"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.881876 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:44 crc kubenswrapper[4745]: E1208 00:07:44.882018 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.888709 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.889292 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.890081 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.891413 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.891988 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.892922 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.893553 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.894123 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.895135 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.895623 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.896465 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.897150 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.898111 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.898608 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.900411 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.900960 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:44Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.901037 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.901751 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.902174 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.902781 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.903370 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.903841 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.904487 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.904943 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.905597 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.906007 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.906611 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.907225 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.907709 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.910714 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.911404 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.912171 4745 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.912327 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.914330 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.915356 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.915992 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.917592 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.918564 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.918724 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:44Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.922317 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.923280 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.925362 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.926233 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 
00:07:44.928042 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.929333 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.930764 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.931716 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.933104 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.933794 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.935598 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.936468 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.937330 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:44Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.937744 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.938428 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.939227 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.940731 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.941478 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 08 00:07:44 crc kubenswrapper[4745]: I1208 00:07:44.965630 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:44Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.018400 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a"} Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.018449 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb"} Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.018461 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"64006ae1f111a4df94b0df0cb30a8c1bf901083caba28eecd01704ead36eadbb"} Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.019855 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"057641e1e91164c419d9cbff58bcde7bb382a2ab76fed2ff1c03f91cf904a189"} Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.020676 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f"} Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.020702 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ed59e8b7828882809321369bb286b419d2a1f3752a62f2e0f998d45c95988f78"} Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.025722 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.026496 4745 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.028220 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe"} Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.067383 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.124499 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.164726 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.195999 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.230739 4745 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.252198 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.267058 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.285904 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.302697 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-contr
oller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.314911 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.323776 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-fsd6v"] Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.324114 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-6czdv"] Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.324362 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-wdjvp"] Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.324404 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.324543 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-fsd6v" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.325606 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-pk459"] Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.325802 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.326077 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.333613 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5c9xn"] Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.335061 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: W1208 00:07:45.335362 4745 reflector.go:561] object-"openshift-dns"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.335421 4745 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 08 00:07:45 crc kubenswrapper[4745]: W1208 00:07:45.335490 4745 reflector.go:561] object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": failed to list *v1.Secret: secrets "machine-config-daemon-dockercfg-r5tcq" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.335506 4745 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"machine-config-daemon-dockercfg-r5tcq\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-config-daemon-dockercfg-r5tcq\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 08 00:07:45 crc kubenswrapper[4745]: W1208 00:07:45.335684 4745 reflector.go:561] object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": failed to list *v1.Secret: secrets "node-resolver-dockercfg-kz9s7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.335706 4745 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"node-resolver-dockercfg-kz9s7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"node-resolver-dockercfg-kz9s7\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 08 00:07:45 crc kubenswrapper[4745]: W1208 00:07:45.335689 4745 reflector.go:561] object-"openshift-dns"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.335738 4745 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and 
this object" logger="UnhandledError" Dec 08 00:07:45 crc kubenswrapper[4745]: W1208 00:07:45.335782 4745 reflector.go:561] object-"openshift-multus"/"default-dockercfg-2q5b6": failed to list *v1.Secret: secrets "default-dockercfg-2q5b6" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.335793 4745 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-dockercfg-2q5b6\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"default-dockercfg-2q5b6\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 08 00:07:45 crc kubenswrapper[4745]: W1208 00:07:45.335845 4745 reflector.go:561] object-"openshift-multus"/"cni-copy-resources": failed to list *v1.ConfigMap: configmaps "cni-copy-resources" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.335860 4745 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"cni-copy-resources\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"cni-copy-resources\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 08 00:07:45 crc kubenswrapper[4745]: W1208 00:07:45.335980 4745 reflector.go:561] object-"openshift-multus"/"multus-daemon-config": failed to list *v1.ConfigMap: configmaps "multus-daemon-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.335995 4745 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"multus-daemon-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"multus-daemon-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 08 00:07:45 crc kubenswrapper[4745]: W1208 00:07:45.336041 4745 reflector.go:561] object-"openshift-multus"/"default-cni-sysctl-allowlist": failed to list *v1.ConfigMap: configmaps "default-cni-sysctl-allowlist" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.336056 4745 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-cni-sysctl-allowlist\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"default-cni-sysctl-allowlist\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 08 00:07:45 crc kubenswrapper[4745]: W1208 00:07:45.336102 4745 
reflector.go:561] object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": failed to list *v1.Secret: secrets "multus-ancillary-tools-dockercfg-vnmsz" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.336114 4745 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"multus-ancillary-tools-dockercfg-vnmsz\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"multus-ancillary-tools-dockercfg-vnmsz\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 08 00:07:45 crc kubenswrapper[4745]: W1208 00:07:45.336179 4745 reflector.go:561] object-"openshift-machine-config-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.336194 4745 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 08 00:07:45 crc kubenswrapper[4745]: W1208 00:07:45.336257 4745 reflector.go:561] object-"openshift-multus"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.336270 4745 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.336353 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: W1208 00:07:45.336506 4745 reflector.go:561] object-"openshift-machine-config-operator"/"proxy-tls": failed to list *v1.Secret: secrets "proxy-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.336594 4745 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"proxy-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"proxy-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 08 00:07:45 crc kubenswrapper[4745]: W1208 00:07:45.336614 4745 reflector.go:561] object-"openshift-machine-config-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.336633 4745 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 08 
00:07:45 crc kubenswrapper[4745]: W1208 00:07:45.336627 4745 reflector.go:561] object-"openshift-machine-config-operator"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.336717 4745 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 08 00:07:45 crc kubenswrapper[4745]: W1208 00:07:45.336844 4745 reflector.go:561] object-"openshift-multus"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.336884 4745 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 08 00:07:45 crc kubenswrapper[4745]: W1208 00:07:45.340747 4745 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovnkube-config": failed to list *v1.ConfigMap: configmaps "ovnkube-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.340783 4745 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovnkube-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"ovnkube-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.345348 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.345377 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.345688 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.345753 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.345823 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 08 
00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.345912 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.367676 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.382221 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.395608 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.402872 4745 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-08 00:02:44 +0000 UTC, rotation deadline is 2026-10-08 00:01:20.850496329 +0000 UTC Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.402959 4745 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7295h53m35.447540458s for next certificate rotation Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.409990 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.418641 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/84d30d10-c052-4bf5-85d9-a2d13fff0750-system-cni-dir\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.418693 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/44f083ce-ad64-45d5-971c-eca93c5bddd6-proxy-tls\") pod \"machine-config-daemon-6czdv\" (UID: \"44f083ce-ad64-45d5-971c-eca93c5bddd6\") " pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.418723 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/44f083ce-ad64-45d5-971c-eca93c5bddd6-mcd-auth-proxy-config\") pod \"machine-config-daemon-6czdv\" (UID: \"44f083ce-ad64-45d5-971c-eca93c5bddd6\") " pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.418742 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/84d30d10-c052-4bf5-85d9-a2d13fff0750-cnibin\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.418789 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7js4h\" (UniqueName: \"kubernetes.io/projected/44f083ce-ad64-45d5-971c-eca93c5bddd6-kube-api-access-7js4h\") pod \"machine-config-daemon-6czdv\" (UID: \"44f083ce-ad64-45d5-971c-eca93c5bddd6\") " pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.418855 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/44f083ce-ad64-45d5-971c-eca93c5bddd6-rootfs\") pod \"machine-config-daemon-6czdv\" (UID: \"44f083ce-ad64-45d5-971c-eca93c5bddd6\") " pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.418876 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"os-release\" (UniqueName: \"kubernetes.io/host-path/84d30d10-c052-4bf5-85d9-a2d13fff0750-os-release\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.418897 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/84d30d10-c052-4bf5-85d9-a2d13fff0750-cni-binary-copy\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.418916 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/84d30d10-c052-4bf5-85d9-a2d13fff0750-tuning-conf-dir\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.425279 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.440748 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.454839 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.469279 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.489057 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.515238 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.519560 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.519751 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lg7n\" (UniqueName: \"kubernetes.io/projected/84d30d10-c052-4bf5-85d9-a2d13fff0750-kube-api-access-6lg7n\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.519867 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:07:47.51981556 +0000 UTC m=+22.949021860 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.519982 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-cnibin\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520281 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/44f083ce-ad64-45d5-971c-eca93c5bddd6-proxy-tls\") pod \"machine-config-daemon-6czdv\" (UID: \"44f083ce-ad64-45d5-971c-eca93c5bddd6\") " pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520321 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-systemd-units\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520353 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-node-log\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520396 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-multus-cni-dir\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520434 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/73d47ce8-04b5-4dba-aa14-655581a103a8-cni-binary-copy\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520467 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-run-k8s-cni-cncf-io\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520509 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-etc-kubernetes\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " 
pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520573 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/84d30d10-c052-4bf5-85d9-a2d13fff0750-cnibin\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520618 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/84d30d10-c052-4bf5-85d9-a2d13fff0750-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520643 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-var-lib-openvswitch\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520673 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520701 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7js4h\" (UniqueName: \"kubernetes.io/projected/44f083ce-ad64-45d5-971c-eca93c5bddd6-kube-api-access-7js4h\") pod \"machine-config-daemon-6czdv\" (UID: \"44f083ce-ad64-45d5-971c-eca93c5bddd6\") " pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520730 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-hostroot\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520752 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/84d30d10-c052-4bf5-85d9-a2d13fff0750-os-release\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520775 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-run-ovn-kubernetes\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520798 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-cni-netd\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520820 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxxkb\" (UniqueName: \"kubernetes.io/projected/0aa20835-0c9d-4fc2-865f-0ec5b1633d5c-kube-api-access-vxxkb\") pod \"node-resolver-fsd6v\" (UID: \"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\") " pod="openshift-dns/node-resolver-fsd6v" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520842 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-os-release\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520864 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovnkube-script-lib\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520888 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb9zl\" (UniqueName: \"kubernetes.io/projected/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-kube-api-access-xb9zl\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520906 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/0aa20835-0c9d-4fc2-865f-0ec5b1633d5c-hosts-file\") pod \"node-resolver-fsd6v\" (UID: \"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\") " pod="openshift-dns/node-resolver-fsd6v" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520945 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-ovn\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520967 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/84d30d10-c052-4bf5-85d9-a2d13fff0750-system-cni-dir\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520993 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-multus-socket-dir-parent\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521015 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-var-lib-kubelet\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521037 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovn-node-metrics-cert\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521063 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-run-netns\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521086 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-openvswitch\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521108 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/44f083ce-ad64-45d5-971c-eca93c5bddd6-mcd-auth-proxy-config\") pod \"machine-config-daemon-6czdv\" (UID: \"44f083ce-ad64-45d5-971c-eca93c5bddd6\") " pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.520676 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/84d30d10-c052-4bf5-85d9-a2d13fff0750-cnibin\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521130 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-var-lib-cni-multus\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521157 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/73d47ce8-04b5-4dba-aa14-655581a103a8-multus-daemon-config\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521179 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-etc-openvswitch\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521201 4745 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-var-lib-cni-bin\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521223 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-kubelet\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521243 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-log-socket\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521274 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.520756 4745 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521312 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-system-cni-dir\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.521418 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:47.521396291 +0000 UTC m=+22.950602591 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521534 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/84d30d10-c052-4bf5-85d9-a2d13fff0750-os-release\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521578 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/84d30d10-c052-4bf5-85d9-a2d13fff0750-system-cni-dir\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521621 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-slash\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.521671 4745 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521732 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovnkube-config\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.521780 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:47.521767411 +0000 UTC m=+22.950973711 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521824 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/44f083ce-ad64-45d5-971c-eca93c5bddd6-rootfs\") pod \"machine-config-daemon-6czdv\" (UID: \"44f083ce-ad64-45d5-971c-eca93c5bddd6\") " pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521850 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/84d30d10-c052-4bf5-85d9-a2d13fff0750-cni-binary-copy\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521875 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/84d30d10-c052-4bf5-85d9-a2d13fff0750-tuning-conf-dir\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521897 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-run-netns\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521938 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.521977 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/44f083ce-ad64-45d5-971c-eca93c5bddd6-rootfs\") pod \"machine-config-daemon-6czdv\" (UID: \"44f083ce-ad64-45d5-971c-eca93c5bddd6\") " pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.522044 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-env-overrides\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.522076 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-multus-conf-dir\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " 
pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.522105 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-run-multus-certs\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.522121 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-cni-bin\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.522141 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7q442\" (UniqueName: \"kubernetes.io/projected/73d47ce8-04b5-4dba-aa14-655581a103a8-kube-api-access-7q442\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.522160 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-systemd\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.522396 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/84d30d10-c052-4bf5-85d9-a2d13fff0750-tuning-conf-dir\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.544982 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.571254 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.596697 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623203 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-multus-socket-dir-parent\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 
crc kubenswrapper[4745]: I1208 00:07:45.623244 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-var-lib-kubelet\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623261 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovn-node-metrics-cert\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623280 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-run-netns\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623299 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-openvswitch\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623329 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-var-lib-cni-multus\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623349 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/73d47ce8-04b5-4dba-aa14-655581a103a8-multus-daemon-config\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623365 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-etc-openvswitch\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623382 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-var-lib-cni-bin\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623400 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-kubelet\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623418 4745 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-log-socket\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623422 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-var-lib-cni-multus\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623449 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623469 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-system-cni-dir\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623480 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-run-netns\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623488 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-slash\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623505 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovnkube-config\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623508 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-openvswitch\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623538 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-var-lib-kubelet\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623390 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: 
\"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-multus-socket-dir-parent\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623556 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-run-netns\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623575 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623600 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-env-overrides\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623618 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-multus-conf-dir\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623635 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-run-multus-certs\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623653 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-kubelet\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623653 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-cni-bin\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623695 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q442\" (UniqueName: \"kubernetes.io/projected/73d47ce8-04b5-4dba-aa14-655581a103a8-kube-api-access-7q442\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623719 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-log-socket\") pod \"ovnkube-node-5c9xn\" (UID: 
\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623736 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-systemd\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623720 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-systemd\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623766 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lg7n\" (UniqueName: \"kubernetes.io/projected/84d30d10-c052-4bf5-85d9-a2d13fff0750-kube-api-access-6lg7n\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623791 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-cnibin\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623813 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623837 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-systemd-units\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623855 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-node-log\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623872 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-multus-cni-dir\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623890 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/73d47ce8-04b5-4dba-aa14-655581a103a8-cni-binary-copy\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc 
kubenswrapper[4745]: I1208 00:07:45.623907 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-run-k8s-cni-cncf-io\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623944 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-etc-kubernetes\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623965 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/84d30d10-c052-4bf5-85d9-a2d13fff0750-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623983 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-var-lib-openvswitch\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623698 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-cni-bin\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624014 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-hostroot\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624032 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-hostroot\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624061 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-run-ovn-kubernetes\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624080 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-cni-netd\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624099 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-vxxkb\" (UniqueName: \"kubernetes.io/projected/0aa20835-0c9d-4fc2-865f-0ec5b1633d5c-kube-api-access-vxxkb\") pod \"node-resolver-fsd6v\" (UID: \"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\") " pod="openshift-dns/node-resolver-fsd6v" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624119 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-os-release\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.624127 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624136 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovnkube-script-lib\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.624143 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624154 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb9zl\" (UniqueName: \"kubernetes.io/projected/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-kube-api-access-xb9zl\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624171 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/0aa20835-0c9d-4fc2-865f-0ec5b1633d5c-hosts-file\") pod \"node-resolver-fsd6v\" (UID: \"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\") " pod="openshift-dns/node-resolver-fsd6v" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624188 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-ovn\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624190 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-system-cni-dir\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624252 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624275 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" 
(UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-run-ovn-kubernetes\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.624156 4745 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624325 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/0aa20835-0c9d-4fc2-865f-0ec5b1633d5c-hosts-file\") pod \"node-resolver-fsd6v\" (UID: \"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\") " pod="openshift-dns/node-resolver-fsd6v" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624296 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-cni-netd\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624352 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-ovn\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624370 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-slash\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624401 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-run-netns\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.624468 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:47.624450892 +0000 UTC m=+23.053657192 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.624726 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-env-overrides\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623618 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-etc-openvswitch\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.623636 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-var-lib-cni-bin\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.625110 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-os-release\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.625193 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-node-log\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.625216 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-multus-conf-dir\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.625223 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-multus-cni-dir\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.625251 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-run-multus-certs\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.625275 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovnkube-script-lib\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.625294 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-host-run-k8s-cni-cncf-io\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.625327 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-etc-kubernetes\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.625353 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.625365 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-var-lib-openvswitch\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.625368 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.625391 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/73d47ce8-04b5-4dba-aa14-655581a103a8-cnibin\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.625396 4745 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.625420 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-systemd-units\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.625471 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:47.625450568 +0000 UTC m=+23.054656858 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.628873 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovn-node-metrics-cert\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.649303 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb9zl\" (UniqueName: \"kubernetes.io/projected/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-kube-api-access-xb9zl\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.811421 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.882100 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.882188 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.882319 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:07:45 crc kubenswrapper[4745]: E1208 00:07:45.882500 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:07:45 crc kubenswrapper[4745]: I1208 00:07:45.942105 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.030444 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.101611 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.149664 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.154529 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/73d47ce8-04b5-4dba-aa14-655581a103a8-multus-daemon-config\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.169005 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.175634 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/44f083ce-ad64-45d5-971c-eca93c5bddd6-proxy-tls\") pod \"machine-config-daemon-6czdv\" (UID: \"44f083ce-ad64-45d5-971c-eca93c5bddd6\") " pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.186245 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.191872 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.243962 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.243975 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.244908 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.252670 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/44f083ce-ad64-45d5-971c-eca93c5bddd6-mcd-auth-proxy-config\") pod \"machine-config-daemon-6czdv\" (UID: \"44f083ce-ad64-45d5-971c-eca93c5bddd6\") " pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.267607 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.290182 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 
00:07:46.308567 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.311352 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.385277 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.394539 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.395469 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovnkube-config\") pod \"ovnkube-node-5c9xn\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.430355 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.448640 4745 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.450235 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.450278 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.450290 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.450398 4745 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.459181 4745 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.459288 4745 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.460318 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.460344 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.460360 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.460377 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.460388 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:46Z","lastTransitionTime":"2025-12-08T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.477057 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.482378 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.482413 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.482423 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.482440 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.482451 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:46Z","lastTransitionTime":"2025-12-08T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.494748 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.498796 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.498826 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.498838 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.498857 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.498869 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:46Z","lastTransitionTime":"2025-12-08T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.510079 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.514630 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.514665 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.514673 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.514691 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.514700 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:46Z","lastTransitionTime":"2025-12-08T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.523038 4745 configmap.go:193] Couldn't get configMap openshift-multus/cni-copy-resources: failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.523142 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/84d30d10-c052-4bf5-85d9-a2d13fff0750-cni-binary-copy podName:84d30d10-c052-4bf5-85d9-a2d13fff0750 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:47.023120496 +0000 UTC m=+22.452326796 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cni-binary-copy" (UniqueName: "kubernetes.io/configmap/84d30d10-c052-4bf5-85d9-a2d13fff0750-cni-binary-copy") pod "multus-additional-cni-plugins-wdjvp" (UID: "84d30d10-c052-4bf5-85d9-a2d13fff0750") : failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.525107 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeByt
es\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.528230 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.528257 4745 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.528268 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.528284 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.528294 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:46Z","lastTransitionTime":"2025-12-08T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.540662 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.540771 4745 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.540977 4745 projected.go:288] Couldn't get configMap openshift-machine-config-operator/kube-root-ca.crt: 
failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.542190 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.542231 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.542240 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.542254 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.542265 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:46Z","lastTransitionTime":"2025-12-08T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.573333 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:46 crc kubenswrapper[4745]: W1208 00:07:46.586678 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1fc4e04e_a6e2_4897_9549_d7517e1ac92b.slice/crio-d1ddae1e19a2add4afb385b550adeb13712a3a5db0750a4985c52406d83581e3 WatchSource:0}: Error finding container d1ddae1e19a2add4afb385b550adeb13712a3a5db0750a4985c52406d83581e3: Status 404 returned error can't find the container with id d1ddae1e19a2add4afb385b550adeb13712a3a5db0750a4985c52406d83581e3 Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.626047 4745 configmap.go:193] Couldn't get configMap openshift-multus/default-cni-sysctl-allowlist: failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.626163 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/84d30d10-c052-4bf5-85d9-a2d13fff0750-cni-sysctl-allowlist podName:84d30d10-c052-4bf5-85d9-a2d13fff0750 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:47.126137475 +0000 UTC m=+22.555343795 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cni-sysctl-allowlist" (UniqueName: "kubernetes.io/configmap/84d30d10-c052-4bf5-85d9-a2d13fff0750-cni-sysctl-allowlist") pod "multus-additional-cni-plugins-wdjvp" (UID: "84d30d10-c052-4bf5-85d9-a2d13fff0750") : failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.626261 4745 configmap.go:193] Couldn't get configMap openshift-multus/cni-copy-resources: failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.626312 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/73d47ce8-04b5-4dba-aa14-655581a103a8-cni-binary-copy podName:73d47ce8-04b5-4dba-aa14-655581a103a8 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:47.12630133 +0000 UTC m=+22.555507630 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cni-binary-copy" (UniqueName: "kubernetes.io/configmap/73d47ce8-04b5-4dba-aa14-655581a103a8-cni-binary-copy") pod "multus-pk459" (UID: "73d47ce8-04b5-4dba-aa14-655581a103a8") : failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.631408 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.644675 4745 projected.go:288] Couldn't get configMap openshift-dns/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.644720 4745 projected.go:194] Error preparing data for projected volume kube-api-access-vxxkb for pod openshift-dns/node-resolver-fsd6v: failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.644777 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/0aa20835-0c9d-4fc2-865f-0ec5b1633d5c-kube-api-access-vxxkb podName:0aa20835-0c9d-4fc2-865f-0ec5b1633d5c nodeName:}" failed. No retries permitted until 2025-12-08 00:07:47.14475951 +0000 UTC m=+22.573965820 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-vxxkb" (UniqueName: "kubernetes.io/projected/0aa20835-0c9d-4fc2-865f-0ec5b1633d5c-kube-api-access-vxxkb") pod "node-resolver-fsd6v" (UID: "0aa20835-0c9d-4fc2-865f-0ec5b1633d5c") : failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.645320 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.645371 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.645394 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.645405 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.645421 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.645432 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:46Z","lastTransitionTime":"2025-12-08T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.646445 4745 projected.go:288] Couldn't get configMap openshift-multus/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.652594 4745 projected.go:288] Couldn't get configMap openshift-multus/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.654337 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.676913 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.685046 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.739741 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.743541 4745 projected.go:194] Error preparing data for projected volume kube-api-access-7q442 for pod openshift-multus/multus-pk459: failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.743624 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/73d47ce8-04b5-4dba-aa14-655581a103a8-kube-api-access-7q442 podName:73d47ce8-04b5-4dba-aa14-655581a103a8 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:47.243603361 +0000 UTC m=+22.672809671 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-7q442" (UniqueName: "kubernetes.io/projected/73d47ce8-04b5-4dba-aa14-655581a103a8-kube-api-access-7q442") pod "multus-pk459" (UID: "73d47ce8-04b5-4dba-aa14-655581a103a8") : failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.746593 4745 projected.go:194] Error preparing data for projected volume kube-api-access-6lg7n for pod openshift-multus/multus-additional-cni-plugins-wdjvp: failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.746723 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/84d30d10-c052-4bf5-85d9-a2d13fff0750-kube-api-access-6lg7n podName:84d30d10-c052-4bf5-85d9-a2d13fff0750 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:47.246684791 +0000 UTC m=+22.675891131 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-6lg7n" (UniqueName: "kubernetes.io/projected/84d30d10-c052-4bf5-85d9-a2d13fff0750-kube-api-access-6lg7n") pod "multus-additional-cni-plugins-wdjvp" (UID: "84d30d10-c052-4bf5-85d9-a2d13fff0750") : failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.748694 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.748743 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.748761 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.748785 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.748801 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:46Z","lastTransitionTime":"2025-12-08T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.797080 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.851645 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.851698 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.851715 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.851741 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.851758 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:46Z","lastTransitionTime":"2025-12-08T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.882038 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.882212 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.897686 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.901500 4745 projected.go:194] Error preparing data for projected volume kube-api-access-7js4h for pod openshift-machine-config-operator/machine-config-daemon-6czdv: failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: E1208 00:07:46.901585 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/44f083ce-ad64-45d5-971c-eca93c5bddd6-kube-api-access-7js4h podName:44f083ce-ad64-45d5-971c-eca93c5bddd6 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:47.401562989 +0000 UTC m=+22.830769289 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-7js4h" (UniqueName: "kubernetes.io/projected/44f083ce-ad64-45d5-971c-eca93c5bddd6-kube-api-access-7js4h") pod "machine-config-daemon-6czdv" (UID: "44f083ce-ad64-45d5-971c-eca93c5bddd6") : failed to sync configmap cache: timed out waiting for the condition Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.953613 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.953647 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.953657 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.953671 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:46 crc kubenswrapper[4745]: I1208 00:07:46.953680 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:46Z","lastTransitionTime":"2025-12-08T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.034725 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de"} Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.036766 4745 generic.go:334] "Generic (PLEG): container finished" podID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerID="5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11" exitCode=0 Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.036885 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerDied","Data":"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11"} Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.036978 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerStarted","Data":"d1ddae1e19a2add4afb385b550adeb13712a3a5db0750a4985c52406d83581e3"} Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.041746 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/84d30d10-c052-4bf5-85d9-a2d13fff0750-cni-binary-copy\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.042760 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/84d30d10-c052-4bf5-85d9-a2d13fff0750-cni-binary-copy\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.064295 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.065486 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.065517 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.065529 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.065548 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.065561 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:47Z","lastTransitionTime":"2025-12-08T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.086285 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.099179 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.123736 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.134600 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.142850 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/73d47ce8-04b5-4dba-aa14-655581a103a8-cni-binary-copy\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.143081 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/84d30d10-c052-4bf5-85d9-a2d13fff0750-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.144410 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/84d30d10-c052-4bf5-85d9-a2d13fff0750-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: 
\"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.145468 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/73d47ce8-04b5-4dba-aa14-655581a103a8-cni-binary-copy\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.150532 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"ima
ge\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.168575 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.168643 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.168660 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.168689 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.168708 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:47Z","lastTransitionTime":"2025-12-08T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.171066 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.185553 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.215315 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"
},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":
\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.235351 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.245821 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxxkb\" (UniqueName: \"kubernetes.io/projected/0aa20835-0c9d-4fc2-865f-0ec5b1633d5c-kube-api-access-vxxkb\") pod \"node-resolver-fsd6v\" (UID: \"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\") " pod="openshift-dns/node-resolver-fsd6v" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.245978 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q442\" (UniqueName: \"kubernetes.io/projected/73d47ce8-04b5-4dba-aa14-655581a103a8-kube-api-access-7q442\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.252271 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7q442\" (UniqueName: \"kubernetes.io/projected/73d47ce8-04b5-4dba-aa14-655581a103a8-kube-api-access-7q442\") pod \"multus-pk459\" (UID: \"73d47ce8-04b5-4dba-aa14-655581a103a8\") " pod="openshift-multus/multus-pk459" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.252889 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxxkb\" (UniqueName: 
\"kubernetes.io/projected/0aa20835-0c9d-4fc2-865f-0ec5b1633d5c-kube-api-access-vxxkb\") pod \"node-resolver-fsd6v\" (UID: \"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\") " pod="openshift-dns/node-resolver-fsd6v" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.258093 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.271479 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.271525 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.271539 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.271561 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.271575 4745 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:47Z","lastTransitionTime":"2025-12-08T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.277221 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-
kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.293689 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.316304 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.340816 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.347492 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lg7n\" (UniqueName: \"kubernetes.io/projected/84d30d10-c052-4bf5-85d9-a2d13fff0750-kube-api-access-6lg7n\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.351328 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lg7n\" (UniqueName: \"kubernetes.io/projected/84d30d10-c052-4bf5-85d9-a2d13fff0750-kube-api-access-6lg7n\") pod \"multus-additional-cni-plugins-wdjvp\" (UID: \"84d30d10-c052-4bf5-85d9-a2d13fff0750\") " 
pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.362026 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":f
alse,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\
\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.374892 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.375083 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.375203 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.375333 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.375438 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:47Z","lastTransitionTime":"2025-12-08T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.379792 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.399621 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.416540 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.434106 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.448381 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7js4h\" (UniqueName: \"kubernetes.io/projected/44f083ce-ad64-45d5-971c-eca93c5bddd6-kube-api-access-7js4h\") pod \"machine-config-daemon-6czdv\" (UID: \"44f083ce-ad64-45d5-971c-eca93c5bddd6\") " pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.452261 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-fsd6v" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.453134 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7js4h\" (UniqueName: \"kubernetes.io/projected/44f083ce-ad64-45d5-971c-eca93c5bddd6-kube-api-access-7js4h\") pod \"machine-config-daemon-6czdv\" (UID: \"44f083ce-ad64-45d5-971c-eca93c5bddd6\") " pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.457068 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c
40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.461215 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-pk459" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.469451 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.478183 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.478553 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.478666 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.478741 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.478822 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.478952 4745 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:47Z","lastTransitionTime":"2025-12-08T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:47 crc kubenswrapper[4745]: W1208 00:07:47.479980 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0aa20835_0c9d_4fc2_865f_0ec5b1633d5c.slice/crio-85181b8c5e89b3d1c094f7a31f1e51f0cfd0cb1e500ec3edd1f0753e33f4e865 WatchSource:0}: Error finding container 85181b8c5e89b3d1c094f7a31f1e51f0cfd0cb1e500ec3edd1f0753e33f4e865: Status 404 returned error can't find the container with id 85181b8c5e89b3d1c094f7a31f1e51f0cfd0cb1e500ec3edd1f0753e33f4e865 Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.497518 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: W1208 00:07:47.498530 4745 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84d30d10_c052_4bf5_85d9_a2d13fff0750.slice/crio-795895dc1a1c065f7da88654266adaba4deed97ce1bee32ba75715af74d04a0e WatchSource:0}: Error finding container 795895dc1a1c065f7da88654266adaba4deed97ce1bee32ba75715af74d04a0e: Status 404 returned error can't find the container with id 795895dc1a1c065f7da88654266adaba4deed97ce1bee32ba75715af74d04a0e Dec 08 00:07:47 crc kubenswrapper[4745]: W1208 00:07:47.505433 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod73d47ce8_04b5_4dba_aa14_655581a103a8.slice/crio-0bb18c3c9311c6ad9e50cfe0e05059df0946555e11a7d2ae8c7a6919af3a22db WatchSource:0}: Error finding container 0bb18c3c9311c6ad9e50cfe0e05059df0946555e11a7d2ae8c7a6919af3a22db: Status 404 returned error can't find the container with id 0bb18c3c9311c6ad9e50cfe0e05059df0946555e11a7d2ae8c7a6919af3a22db Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.520686 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.549663 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.549751 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.549784 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:47 crc kubenswrapper[4745]: E1208 00:07:47.549891 4745 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 00:07:47 crc kubenswrapper[4745]: E1208 00:07:47.549960 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:51.549947514 +0000 UTC m=+26.979153814 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 00:07:47 crc kubenswrapper[4745]: E1208 00:07:47.550376 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:07:51.550366465 +0000 UTC m=+26.979572765 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:07:47 crc kubenswrapper[4745]: E1208 00:07:47.550418 4745 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 00:07:47 crc kubenswrapper[4745]: E1208 00:07:47.550441 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:51.550435487 +0000 UTC m=+26.979641787 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.552339 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\
\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-over
rides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9
zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.571761 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.581025 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.581046 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.581054 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.581068 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.581078 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:47Z","lastTransitionTime":"2025-12-08T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.650500 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.650823 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:07:47 crc kubenswrapper[4745]: E1208 00:07:47.650772 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 00:07:47 crc kubenswrapper[4745]: E1208 00:07:47.650978 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 00:07:47 crc kubenswrapper[4745]: E1208 00:07:47.650989 4745 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:47 crc kubenswrapper[4745]: E1208 00:07:47.651023 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:51.651011543 +0000 UTC m=+27.080217843 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:47 crc kubenswrapper[4745]: E1208 00:07:47.650960 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 00:07:47 crc kubenswrapper[4745]: E1208 00:07:47.651041 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 00:07:47 crc kubenswrapper[4745]: E1208 00:07:47.651047 4745 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:47 crc kubenswrapper[4745]: E1208 00:07:47.651067 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:51.651061904 +0000 UTC m=+27.080268204 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.684953 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.685007 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.685026 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.685051 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.685069 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:47Z","lastTransitionTime":"2025-12-08T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.745539 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.748953 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.765894 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.767555 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshif
t-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.771774 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.788195 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.790256 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.790286 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.790296 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.790317 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.790327 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:47Z","lastTransitionTime":"2025-12-08T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.809032 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.855235 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.876350 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.881778 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.881949 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:07:47 crc kubenswrapper[4745]: E1208 00:07:47.882040 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:07:47 crc kubenswrapper[4745]: E1208 00:07:47.882164 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.891268 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.895048 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.895082 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.895092 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.895111 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.895124 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:47Z","lastTransitionTime":"2025-12-08T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.909752 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.929765 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.945886 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.957402 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.969115 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.982634 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.994766 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.998513 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.998564 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.998581 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.998609 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:47 crc kubenswrapper[4745]: I1208 00:07:47.998622 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:47Z","lastTransitionTime":"2025-12-08T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.010713 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-q7cpd"] Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.011769 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-q7cpd" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.015207 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.015445 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.016527 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.020539 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.022648 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e4
9117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"
setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.045032 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.047368 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerStarted","Data":"f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.047424 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerStarted","Data":"1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.047438 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerStarted","Data":"7ceb40e935ace4c459706e4edf38a692b50a3e86ce5a7a1b119170ef95e886d3"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.049116 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-fsd6v" event={"ID":"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c","Type":"ContainerStarted","Data":"6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe"} Dec 08 00:07:48 crc kubenswrapper[4745]: 
I1208 00:07:48.049175 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-fsd6v" event={"ID":"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c","Type":"ContainerStarted","Data":"85181b8c5e89b3d1c094f7a31f1e51f0cfd0cb1e500ec3edd1f0753e33f4e865"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.053057 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerStarted","Data":"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.053108 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerStarted","Data":"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.053126 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerStarted","Data":"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.053138 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerStarted","Data":"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.053150 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerStarted","Data":"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.053164 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerStarted","Data":"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.054602 4745 generic.go:334] "Generic (PLEG): container finished" podID="84d30d10-c052-4bf5-85d9-a2d13fff0750" containerID="9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef" exitCode=0 Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.054663 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" event={"ID":"84d30d10-c052-4bf5-85d9-a2d13fff0750","Type":"ContainerDied","Data":"9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.054687 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" event={"ID":"84d30d10-c052-4bf5-85d9-a2d13fff0750","Type":"ContainerStarted","Data":"795895dc1a1c065f7da88654266adaba4deed97ce1bee32ba75715af74d04a0e"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.057070 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3283a9b2-6c40-47e3-a219-3e203a77ad0b-host\") pod \"node-ca-q7cpd\" (UID: \"3283a9b2-6c40-47e3-a219-3e203a77ad0b\") " pod="openshift-image-registry/node-ca-q7cpd" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.057142 4745 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltwtx\" (UniqueName: \"kubernetes.io/projected/3283a9b2-6c40-47e3-a219-3e203a77ad0b-kube-api-access-ltwtx\") pod \"node-ca-q7cpd\" (UID: \"3283a9b2-6c40-47e3-a219-3e203a77ad0b\") " pod="openshift-image-registry/node-ca-q7cpd" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.057193 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3283a9b2-6c40-47e3-a219-3e203a77ad0b-serviceca\") pod \"node-ca-q7cpd\" (UID: \"3283a9b2-6c40-47e3-a219-3e203a77ad0b\") " pod="openshift-image-registry/node-ca-q7cpd" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.057273 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pk459" event={"ID":"73d47ce8-04b5-4dba-aa14-655581a103a8","Type":"ContainerStarted","Data":"c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.057327 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pk459" event={"ID":"73d47ce8-04b5-4dba-aa14-655581a103a8","Type":"ContainerStarted","Data":"0bb18c3c9311c6ad9e50cfe0e05059df0946555e11a7d2ae8c7a6919af3a22db"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.059900 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.081620 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.099662 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.101674 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.101720 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.101732 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.101751 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.101764 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:48Z","lastTransitionTime":"2025-12-08T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.121025 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.133711 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.146960 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.158065 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltwtx\" (UniqueName: \"kubernetes.io/projected/3283a9b2-6c40-47e3-a219-3e203a77ad0b-kube-api-access-ltwtx\") pod \"node-ca-q7cpd\" (UID: \"3283a9b2-6c40-47e3-a219-3e203a77ad0b\") " pod="openshift-image-registry/node-ca-q7cpd" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.158300 4745 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3283a9b2-6c40-47e3-a219-3e203a77ad0b-serviceca\") pod \"node-ca-q7cpd\" (UID: \"3283a9b2-6c40-47e3-a219-3e203a77ad0b\") " pod="openshift-image-registry/node-ca-q7cpd" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.158365 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3283a9b2-6c40-47e3-a219-3e203a77ad0b-host\") pod \"node-ca-q7cpd\" (UID: \"3283a9b2-6c40-47e3-a219-3e203a77ad0b\") " pod="openshift-image-registry/node-ca-q7cpd" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.159270 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3283a9b2-6c40-47e3-a219-3e203a77ad0b-host\") pod \"node-ca-q7cpd\" (UID: \"3283a9b2-6c40-47e3-a219-3e203a77ad0b\") " pod="openshift-image-registry/node-ca-q7cpd" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.160658 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3283a9b2-6c40-47e3-a219-3e203a77ad0b-serviceca\") pod \"node-ca-q7cpd\" (UID: \"3283a9b2-6c40-47e3-a219-3e203a77ad0b\") " pod="openshift-image-registry/node-ca-q7cpd" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.169240 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.174808 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltwtx\" (UniqueName: \"kubernetes.io/projected/3283a9b2-6c40-47e3-a219-3e203a77ad0b-kube-api-access-ltwtx\") pod \"node-ca-q7cpd\" (UID: \"3283a9b2-6c40-47e3-a219-3e203a77ad0b\") " pod="openshift-image-registry/node-ca-q7cpd" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.182649 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.203242 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.206037 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.206104 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.206122 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.206145 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.206162 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:48Z","lastTransitionTime":"2025-12-08T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.217289 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.229534 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.252572 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z 
is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.264471 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restar
tCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.284176 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.294843 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.307680 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.307739 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.307753 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.307773 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.307790 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:48Z","lastTransitionTime":"2025-12-08T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.310344 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b
5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.320792 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.327813 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-q7cpd" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.335349 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: W1208 00:07:48.338402 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3283a9b2_6c40_47e3_a219_3e203a77ad0b.slice/crio-af30332596067d5b5068730adad2c4f321e9fd7c7021eaf0d8457a75fbf3202c WatchSource:0}: Error finding container af30332596067d5b5068730adad2c4f321e9fd7c7021eaf0d8457a75fbf3202c: Status 404 returned error can't find the container with id af30332596067d5b5068730adad2c4f321e9fd7c7021eaf0d8457a75fbf3202c Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.354625 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.374958 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host
/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.395824 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.410635 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.410676 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.410692 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.410706 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.410715 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:48Z","lastTransitionTime":"2025-12-08T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.417457 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.431352 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.449277 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.462043 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.477482 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.489988 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.512997 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.513052 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.513065 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.513087 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.513101 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:48Z","lastTransitionTime":"2025-12-08T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.615504 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.615550 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.615562 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.615580 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.615592 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:48Z","lastTransitionTime":"2025-12-08T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.719353 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.719831 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.719847 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.719867 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.719887 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:48Z","lastTransitionTime":"2025-12-08T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.822714 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.822937 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.823013 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.823086 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.823151 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:48Z","lastTransitionTime":"2025-12-08T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.870027 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.873483 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.881876 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:48 crc kubenswrapper[4745]: E1208 00:07:48.881993 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.888545 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.905039 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.922770 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z 
is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.925429 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.925462 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.925476 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.925492 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.925504 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:48Z","lastTransitionTime":"2025-12-08T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.936860 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",
\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.957552 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.981358 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:48 crc kubenswrapper[4745]: I1208 00:07:48.997118 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc 
kubenswrapper[4745]: I1208 00:07:49.012673 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.028316 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.028352 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.028363 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.028381 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.028393 4745 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:49Z","lastTransitionTime":"2025-12-08T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.032238 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd
6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.047846 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.060402 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-q7cpd" event={"ID":"3283a9b2-6c40-47e3-a219-3e203a77ad0b","Type":"ContainerStarted","Data":"fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634"} Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.060459 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-q7cpd" event={"ID":"3283a9b2-6c40-47e3-a219-3e203a77ad0b","Type":"ContainerStarted","Data":"af30332596067d5b5068730adad2c4f321e9fd7c7021eaf0d8457a75fbf3202c"} Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.061841 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.064599 4745 generic.go:334] "Generic (PLEG): container finished" podID="84d30d10-c052-4bf5-85d9-a2d13fff0750" containerID="71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667" exitCode=0 Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.064766 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" event={"ID":"84d30d10-c052-4bf5-85d9-a2d13fff0750","Type":"ContainerDied","Data":"71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667"} Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.078684 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f3
6cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.106521 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/o
cp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.126020 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.131382 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.131408 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.131416 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.131430 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.131440 4745 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:49Z","lastTransitionTime":"2025-12-08T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.145418 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.175294 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.202018 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.216704 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.239592 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.239800 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.239807 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.239821 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.239830 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:49Z","lastTransitionTime":"2025-12-08T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.240886 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCo
unt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acc
ess-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452
626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.267321 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"
}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959
c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.282147 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.293511 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.314788 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.329744 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.339917 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.341727 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.341772 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.341781 4745 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.341797 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.341807 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:49Z","lastTransitionTime":"2025-12-08T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.353328 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.368976 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.380726 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.391578 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.400905 4745 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:49Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.444176 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.444217 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.444231 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.444248 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.444257 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:49Z","lastTransitionTime":"2025-12-08T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.547637 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.547676 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.547687 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.547708 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.547722 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:49Z","lastTransitionTime":"2025-12-08T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.650528 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.650571 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.650584 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.650603 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.650615 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:49Z","lastTransitionTime":"2025-12-08T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.753885 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.754021 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.754042 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.754072 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.754092 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:49Z","lastTransitionTime":"2025-12-08T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.857080 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.857121 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.857134 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.857156 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.857170 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:49Z","lastTransitionTime":"2025-12-08T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.881973 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.882056 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:07:49 crc kubenswrapper[4745]: E1208 00:07:49.882176 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:07:49 crc kubenswrapper[4745]: E1208 00:07:49.882422 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.960237 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.960284 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.960293 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.960318 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:49 crc kubenswrapper[4745]: I1208 00:07:49.960330 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:49Z","lastTransitionTime":"2025-12-08T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.063858 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.063989 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.064014 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.064041 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.064059 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:50Z","lastTransitionTime":"2025-12-08T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.071138 4745 generic.go:334] "Generic (PLEG): container finished" podID="84d30d10-c052-4bf5-85d9-a2d13fff0750" containerID="d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83" exitCode=0 Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.071184 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" event={"ID":"84d30d10-c052-4bf5-85d9-a2d13fff0750","Type":"ContainerDied","Data":"d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83"} Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.077059 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerStarted","Data":"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24"} Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.094053 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\
\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:50Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.113779 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:50Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.130056 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:50Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.160903 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:50Z 
is after 2025-08-24T17:21:41Z" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.174182 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.174280 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.174297 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.174317 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.174330 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:50Z","lastTransitionTime":"2025-12-08T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.178205 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:50Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.195552 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:50Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.212879 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:50Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.233312 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:50Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.246093 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:50Z is after 
2025-08-24T17:21:41Z" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.271301 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/
etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025
-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:50Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.278031 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.278055 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.278062 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.278075 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.278084 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:50Z","lastTransitionTime":"2025-12-08T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.289769 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:50Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.305200 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:50Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.321475 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:50Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.340019 4745 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:50Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.353852 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:50Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.382064 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.382113 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.382127 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.382147 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.382160 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:50Z","lastTransitionTime":"2025-12-08T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.484871 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.484913 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.484944 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.484960 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.484971 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:50Z","lastTransitionTime":"2025-12-08T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.588578 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.588637 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.588655 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.588683 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.588700 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:50Z","lastTransitionTime":"2025-12-08T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.691605 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.691641 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.691650 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.691666 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.691676 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:50Z","lastTransitionTime":"2025-12-08T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.794536 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.794596 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.794608 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.794629 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.794642 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:50Z","lastTransitionTime":"2025-12-08T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.882500 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:50 crc kubenswrapper[4745]: E1208 00:07:50.882656 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.899575 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.899638 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.899650 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.899669 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:50 crc kubenswrapper[4745]: I1208 00:07:50.899683 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:50Z","lastTransitionTime":"2025-12-08T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.001963 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.002021 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.002035 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.002055 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.002069 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:51Z","lastTransitionTime":"2025-12-08T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.083730 4745 generic.go:334] "Generic (PLEG): container finished" podID="84d30d10-c052-4bf5-85d9-a2d13fff0750" containerID="120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8" exitCode=0 Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.083789 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" event={"ID":"84d30d10-c052-4bf5-85d9-a2d13fff0750","Type":"ContainerDied","Data":"120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8"} Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.100606 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:51Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.104850 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.104923 4745 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.104979 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.105007 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.105027 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:51Z","lastTransitionTime":"2025-12-08T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.117691 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:51Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.136895 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:51Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.156573 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:51Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.181067 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:51Z 
is after 2025-08-24T17:21:41Z" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.204658 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:51Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.209202 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.209252 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.209266 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.209287 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.209303 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:51Z","lastTransitionTime":"2025-12-08T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.225487 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:51Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.245734 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:51Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.271209 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:51Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.306220 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:51Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.311448 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.311487 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.311498 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.311514 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.311526 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:51Z","lastTransitionTime":"2025-12-08T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.323274 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:51Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.347104 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:51Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.361073 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:51Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.377905 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:51Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.393368 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:51Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.414276 4745 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.414317 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.414329 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.414344 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.414357 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:51Z","lastTransitionTime":"2025-12-08T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.517752 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.517815 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.517833 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.517858 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.517877 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:51Z","lastTransitionTime":"2025-12-08T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.602683 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.602872 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:51 crc kubenswrapper[4745]: E1208 00:07:51.603013 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:07:59.602973015 +0000 UTC m=+35.032179345 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:07:51 crc kubenswrapper[4745]: E1208 00:07:51.603060 4745 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 00:07:51 crc kubenswrapper[4745]: E1208 00:07:51.603146 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:59.603124349 +0000 UTC m=+35.032330679 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.603186 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:51 crc kubenswrapper[4745]: E1208 00:07:51.603337 4745 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 00:07:51 crc kubenswrapper[4745]: E1208 00:07:51.603416 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:59.603401826 +0000 UTC m=+35.032608166 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.620704 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.620750 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.620767 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.620787 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.620801 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:51Z","lastTransitionTime":"2025-12-08T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.703960 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.704046 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:07:51 crc kubenswrapper[4745]: E1208 00:07:51.704265 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 00:07:51 crc kubenswrapper[4745]: E1208 00:07:51.704304 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 00:07:51 crc kubenswrapper[4745]: E1208 00:07:51.704330 4745 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:51 crc kubenswrapper[4745]: E1208 00:07:51.704264 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 00:07:51 crc kubenswrapper[4745]: E1208 00:07:51.704417 4745 nestedpendingoperations.go:348] Operation 
for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:59.704389393 +0000 UTC m=+35.133595733 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:51 crc kubenswrapper[4745]: E1208 00:07:51.704433 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 00:07:51 crc kubenswrapper[4745]: E1208 00:07:51.704460 4745 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:51 crc kubenswrapper[4745]: E1208 00:07:51.704533 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 00:07:59.704504366 +0000 UTC m=+35.133710696 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.724056 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.724121 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.724144 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.724177 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.724200 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:51Z","lastTransitionTime":"2025-12-08T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.827593 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.827656 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.827674 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.827699 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.827715 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:51Z","lastTransitionTime":"2025-12-08T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.881661 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.881703 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:07:51 crc kubenswrapper[4745]: E1208 00:07:51.881841 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:07:51 crc kubenswrapper[4745]: E1208 00:07:51.882007 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.930663 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.930729 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.930747 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.930774 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:51 crc kubenswrapper[4745]: I1208 00:07:51.930791 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:51Z","lastTransitionTime":"2025-12-08T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.066861 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.066950 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.066964 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.066985 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.066997 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:52Z","lastTransitionTime":"2025-12-08T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.093704 4745 generic.go:334] "Generic (PLEG): container finished" podID="84d30d10-c052-4bf5-85d9-a2d13fff0750" containerID="3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752" exitCode=0 Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.093776 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" event={"ID":"84d30d10-c052-4bf5-85d9-a2d13fff0750","Type":"ContainerDied","Data":"3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752"} Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.117038 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:52Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.146194 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:52Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.164146 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:52Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.171478 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.171513 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.171525 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.171539 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.171548 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:52Z","lastTransitionTime":"2025-12-08T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.200153 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b
5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:52Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.218947 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:52Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.237214 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:52Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.263063 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:52Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.273787 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.273829 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.273845 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.273865 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.273953 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:52Z","lastTransitionTime":"2025-12-08T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.279894 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:52Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.294161 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:52Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.319502 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:52Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.336264 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:52Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.350841 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:52Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.364126 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:52Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.377281 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.377321 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.377333 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.377351 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.377363 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:52Z","lastTransitionTime":"2025-12-08T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.383750 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:52Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.398461 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:52Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.480531 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.480571 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.480579 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.480592 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.480601 4745 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:52Z","lastTransitionTime":"2025-12-08T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.584550 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.584583 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.584595 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.584612 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.584626 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:52Z","lastTransitionTime":"2025-12-08T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.687699 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.687775 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.687795 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.687826 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.687846 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:52Z","lastTransitionTime":"2025-12-08T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.791657 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.791751 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.791778 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.791811 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.791837 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:52Z","lastTransitionTime":"2025-12-08T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.882290 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:52 crc kubenswrapper[4745]: E1208 00:07:52.882532 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.894760 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.894817 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.894836 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.894862 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.894888 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:52Z","lastTransitionTime":"2025-12-08T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.998280 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.998334 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.998344 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.998372 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:52 crc kubenswrapper[4745]: I1208 00:07:52.998387 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:52Z","lastTransitionTime":"2025-12-08T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.100564 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.100628 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.100650 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.100682 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.100708 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:53Z","lastTransitionTime":"2025-12-08T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.105440 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerStarted","Data":"323e5bf56231b60beb6a8fc4dace5c513aabfd00ad4237c0fc1be35bfbd5a385"} Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.105519 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.105544 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.122522 4745 generic.go:334] "Generic (PLEG): container finished" podID="84d30d10-c052-4bf5-85d9-a2d13fff0750" containerID="987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388" exitCode=0 Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.122579 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" event={"ID":"84d30d10-c052-4bf5-85d9-a2d13fff0750","Type":"ContainerDied","Data":"987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388"} Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.129088 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\
\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.152698 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d1
7ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.176355 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.195903 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.198652 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.198788 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.203530 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.203563 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.203574 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.203587 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.203599 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:53Z","lastTransitionTime":"2025-12-08T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.219258 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"i
mageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://323e5bf56231b60beb6a8fc4dace5c513aabfd00ad4237c0fc1be35bfbd5a385\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":
\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.242786 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.259783 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.276749 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.295302 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.307166 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.307225 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.307249 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.307280 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.307302 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:53Z","lastTransitionTime":"2025-12-08T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.312949 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg
7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.322282 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.334028 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.346709 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.360766 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.372755 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.385367 4745 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe15
6014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.397315 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.407755 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.410791 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.410901 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.410989 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.411075 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.411147 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:53Z","lastTransitionTime":"2025-12-08T00:07:53Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.424916 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/k
ubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"i
mage\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://323e5bf56231b60beb6a8fc4dace5c513aabfd00ad4237c0fc1be35bfbd5a385\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mount
Path\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.447280 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.460856 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.475430 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.489700 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.512178 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.513590 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.513657 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.513676 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.513729 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.513750 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:53Z","lastTransitionTime":"2025-12-08T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.534957 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.579075 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.591646 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.604950 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.615189 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.615215 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.615223 4745 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.615236 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.615245 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:53Z","lastTransitionTime":"2025-12-08T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.615918 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.624743 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:53Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.718653 4745 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.718688 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.718696 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.718711 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.718723 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:53Z","lastTransitionTime":"2025-12-08T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.822406 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.822461 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.822479 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.822504 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.822523 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:53Z","lastTransitionTime":"2025-12-08T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.882161 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.882310 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:07:53 crc kubenswrapper[4745]: E1208 00:07:53.882390 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:07:53 crc kubenswrapper[4745]: E1208 00:07:53.882552 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.925838 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.925960 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.925987 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.926020 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:53 crc kubenswrapper[4745]: I1208 00:07:53.926044 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:53Z","lastTransitionTime":"2025-12-08T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.029602 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.029687 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.029708 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.029736 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.029759 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:54Z","lastTransitionTime":"2025-12-08T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.131686 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.131762 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.131789 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.131820 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.131843 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:54Z","lastTransitionTime":"2025-12-08T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.133088 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" event={"ID":"84d30d10-c052-4bf5-85d9-a2d13fff0750","Type":"ContainerStarted","Data":"f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd"} Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.133198 4745 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.234638 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.234693 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.234710 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.234734 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.234751 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:54Z","lastTransitionTime":"2025-12-08T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.337160 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.337220 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.337240 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.337266 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.337282 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:54Z","lastTransitionTime":"2025-12-08T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.439756 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.439796 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.439809 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.439825 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.439838 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:54Z","lastTransitionTime":"2025-12-08T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.543460 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.543502 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.543514 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.543531 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.543543 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:54Z","lastTransitionTime":"2025-12-08T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.646740 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.646810 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.646827 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.646851 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.646868 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:54Z","lastTransitionTime":"2025-12-08T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.748999 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.749063 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.749081 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.749105 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.749125 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:54Z","lastTransitionTime":"2025-12-08T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.851864 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.851964 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.851990 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.852022 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.852043 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:54Z","lastTransitionTime":"2025-12-08T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.881777 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:54 crc kubenswrapper[4745]: E1208 00:07:54.882027 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.907220 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27
b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:54Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.925680 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:54Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.947654 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:54Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.954544 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.954579 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.954597 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.954620 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.954637 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:54Z","lastTransitionTime":"2025-12-08T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.977515 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://323e5bf56231b60beb6a8fc4dace5c513aabfd00ad4237c0fc1be35bfbd5a385\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:54Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:54 crc kubenswrapper[4745]: I1208 00:07:54.992909 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:54Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.015244 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.034355 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.057992 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.058312 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.058359 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.058378 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.058406 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.058491 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:55Z","lastTransitionTime":"2025-12-08T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.078389 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.116868 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.136652 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.137844 4745 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.158682 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.161609 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.161679 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.161699 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.161727 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.161747 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:55Z","lastTransitionTime":"2025-12-08T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.176869 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.195769 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.207277 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.216483 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.228952 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.248068 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.264997 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.265031 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.265042 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.265058 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.265070 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:55Z","lastTransitionTime":"2025-12-08T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.277485 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://323e5bf56231b60beb6a8fc4dace5c513aabfd00ad4237c0fc1be35bfbd5a385\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.298047 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.319263 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.338574 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.358370 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.367739 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.367792 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.367811 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.367836 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.367853 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:55Z","lastTransitionTime":"2025-12-08T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.373984 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.407018 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.425759 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.441506 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.454338 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.468785 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.471312 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.471375 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.471395 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.471419 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.471468 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:55Z","lastTransitionTime":"2025-12-08T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.489006 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:55Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.574249 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.574320 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.574338 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.574368 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.574388 4745 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:55Z","lastTransitionTime":"2025-12-08T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.677472 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.677550 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.677574 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.677603 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.677625 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:55Z","lastTransitionTime":"2025-12-08T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.780165 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.780231 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.780249 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.780277 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.780294 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:55Z","lastTransitionTime":"2025-12-08T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.881645 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.881693 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:07:55 crc kubenswrapper[4745]: E1208 00:07:55.881798 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:07:55 crc kubenswrapper[4745]: E1208 00:07:55.882069 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.884059 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.884136 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.884365 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.884418 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.884444 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:55Z","lastTransitionTime":"2025-12-08T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.987419 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.987483 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.987502 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.987528 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:55 crc kubenswrapper[4745]: I1208 00:07:55.987549 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:55Z","lastTransitionTime":"2025-12-08T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.090514 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.090574 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.090594 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.090622 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.090642 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:56Z","lastTransitionTime":"2025-12-08T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.143884 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovnkube-controller/0.log" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.147648 4745 generic.go:334] "Generic (PLEG): container finished" podID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerID="323e5bf56231b60beb6a8fc4dace5c513aabfd00ad4237c0fc1be35bfbd5a385" exitCode=1 Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.147721 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerDied","Data":"323e5bf56231b60beb6a8fc4dace5c513aabfd00ad4237c0fc1be35bfbd5a385"} Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.149044 4745 scope.go:117] "RemoveContainer" containerID="323e5bf56231b60beb6a8fc4dace5c513aabfd00ad4237c0fc1be35bfbd5a385" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.179373 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.195023 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.195087 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.195112 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.195143 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.195167 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:56Z","lastTransitionTime":"2025-12-08T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.203054 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.223320 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.245484 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.274394 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.291192 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.298770 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.298814 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.298831 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.298855 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.298873 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:56Z","lastTransitionTime":"2025-12-08T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.313159 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f
7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.333089 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.352433 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.372054 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.386698 4745 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.402068 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.402387 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.402412 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.402427 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.402448 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.402463 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:56Z","lastTransitionTime":"2025-12-08T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.422542 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.438362 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.468304 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://323e5bf56231b60beb6a8fc4dace5c513aabfd00
ad4237c0fc1be35bfbd5a385\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://323e5bf56231b60beb6a8fc4dace5c513aabfd00ad4237c0fc1be35bfbd5a385\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 00:07:55.094396 6060 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1208 00:07:55.094439 6060 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1208 00:07:55.094448 6060 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1208 00:07:55.094498 6060 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1208 00:07:55.094525 6060 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1208 00:07:55.094538 6060 handler.go:208] Removed *v1.Node event handler 2\\\\nI1208 00:07:55.094541 6060 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1208 00:07:55.094548 6060 handler.go:208] Removed *v1.Node event handler 7\\\\nI1208 00:07:55.094560 6060 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1208 00:07:55.094565 6060 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1208 00:07:55.094601 6060 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1208 00:07:55.094661 6060 factory.go:656] Stopping watch factory\\\\nI1208 00:07:55.094687 6060 ovnkube.go:599] Stopped ovnkube\\\\nI1208 00:07:55.094692 6060 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1208 00:07:55.094720 6060 metrics.go:553] Stopping metrics server at address 
\\\\\\\"127.0.0.1:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174
f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.504824 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.504868 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.504880 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.504899 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.504912 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:56Z","lastTransitionTime":"2025-12-08T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.611412 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.611469 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.611487 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.611509 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.611525 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:56Z","lastTransitionTime":"2025-12-08T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.713412 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.713452 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.713462 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.713477 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.713486 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:56Z","lastTransitionTime":"2025-12-08T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.800565 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.800606 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.800619 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.800634 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.800645 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:56Z","lastTransitionTime":"2025-12-08T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:56 crc kubenswrapper[4745]: E1208 00:07:56.816602 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.820821 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.820860 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.820871 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.820888 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.820900 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:56Z","lastTransitionTime":"2025-12-08T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:56 crc kubenswrapper[4745]: E1208 00:07:56.837841 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.841227 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.841264 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.841276 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.841292 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.841303 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:56Z","lastTransitionTime":"2025-12-08T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:56 crc kubenswrapper[4745]: E1208 00:07:56.859827 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.863411 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.863453 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.863465 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.863483 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.863494 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:56Z","lastTransitionTime":"2025-12-08T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:56 crc kubenswrapper[4745]: E1208 00:07:56.880817 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.882023 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:56 crc kubenswrapper[4745]: E1208 00:07:56.882152 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.884399 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.884440 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.884452 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.884467 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.884478 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:56Z","lastTransitionTime":"2025-12-08T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:56 crc kubenswrapper[4745]: E1208 00:07:56.904489 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeByt
es\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:56Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:56 crc kubenswrapper[4745]: E1208 00:07:56.904609 4745 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.905893 4745 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.905962 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.905988 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.906018 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:56 crc kubenswrapper[4745]: I1208 00:07:56.906036 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:56Z","lastTransitionTime":"2025-12-08T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.008043 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.008078 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.008089 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.008106 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.008117 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:57Z","lastTransitionTime":"2025-12-08T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.111078 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.111125 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.111137 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.111158 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.111171 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:57Z","lastTransitionTime":"2025-12-08T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.154766 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovnkube-controller/0.log" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.159187 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerStarted","Data":"5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8"} Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.159373 4745 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.179335 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:57Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.196114 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:57Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.210769 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:57Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.213652 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.213689 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.213706 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.213729 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.213748 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:57Z","lastTransitionTime":"2025-12-08T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.233700 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:57Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.249672 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:57Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.271357 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:57Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.292747 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:57Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.313558 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:57Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.316826 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.316882 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.316895 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.316913 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.316951 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:57Z","lastTransitionTime":"2025-12-08T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.345375 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\
",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://323e5bf56231b60beb6a8fc4dace5c513aabfd00ad4237c0fc1be35bfbd5a385\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 00:07:55.094396 6060 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1208 00:07:55.094439 6060 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1208 00:07:55.094448 6060 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1208 00:07:55.094498 6060 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1208 00:07:55.094525 6060 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1208 00:07:55.094538 6060 handler.go:208] Removed *v1.Node event handler 2\\\\nI1208 00:07:55.094541 6060 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1208 00:07:55.094548 6060 handler.go:208] Removed *v1.Node event handler 7\\\\nI1208 00:07:55.094560 6060 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1208 00:07:55.094565 6060 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1208 00:07:55.094601 6060 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1208 00:07:55.094661 6060 factory.go:656] Stopping watch factory\\\\nI1208 00:07:55.094687 6060 ovnkube.go:599] Stopped ovnkube\\\\nI1208 00:07:55.094692 6060 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1208 00:07:55.094720 6060 metrics.go:553] Stopping metrics server at address 
\\\\\\\"127.0.0.1:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatus
es\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:57Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.366758 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:57Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.387044 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:57Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.404456 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:57Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.419933 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.419989 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.420000 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.420015 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.420028 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:57Z","lastTransitionTime":"2025-12-08T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.428667 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:57Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.443409 4745 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:57Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.482289 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:57Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.523326 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.523389 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.523408 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.523434 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.523452 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:57Z","lastTransitionTime":"2025-12-08T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.626460 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.626513 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.626531 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.626556 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.626573 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:57Z","lastTransitionTime":"2025-12-08T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.729336 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.729404 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.729422 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.729450 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.729468 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:57Z","lastTransitionTime":"2025-12-08T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.832890 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.832970 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.833007 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.833033 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.833046 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:57Z","lastTransitionTime":"2025-12-08T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.882068 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.882160 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:07:57 crc kubenswrapper[4745]: E1208 00:07:57.882270 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:07:57 crc kubenswrapper[4745]: E1208 00:07:57.882376 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.937056 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.937124 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.937145 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.937171 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:57 crc kubenswrapper[4745]: I1208 00:07:57.937190 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:57Z","lastTransitionTime":"2025-12-08T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.039953 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.040017 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.040029 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.040046 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.040056 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:58Z","lastTransitionTime":"2025-12-08T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.142885 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.142976 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.142994 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.143021 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.143042 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:58Z","lastTransitionTime":"2025-12-08T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.165285 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovnkube-controller/1.log" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.166338 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovnkube-controller/0.log" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.171061 4745 generic.go:334] "Generic (PLEG): container finished" podID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerID="5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8" exitCode=1 Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.171148 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerDied","Data":"5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8"} Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.171256 4745 scope.go:117] "RemoveContainer" containerID="323e5bf56231b60beb6a8fc4dace5c513aabfd00ad4237c0fc1be35bfbd5a385" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.173314 4745 scope.go:117] "RemoveContainer" containerID="5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8" Dec 08 00:07:58 crc kubenswrapper[4745]: E1208 00:07:58.173637 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.205246 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:58Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.225229 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:58Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.245126 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:58Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.245335 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.245370 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.245387 4745 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.245407 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.245423 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:58Z","lastTransitionTime":"2025-12-08T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.257555 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:58Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.268282 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:58Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.282874 4745 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:58Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.295034 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:58Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.308753 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:58Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.338589 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers 
with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servic
eaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\
\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://323e5bf56231b60beb6a8fc4dace5c513aabfd00ad4237c0fc1be35bfbd5a385\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 00:07:55.094396 6060 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1208 00:07:55.094439 6060 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1208 00:07:55.094448 6060 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1208 00:07:55.094498 6060 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1208 00:07:55.094525 6060 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1208 00:07:55.094538 6060 handler.go:208] Removed *v1.Node event handler 2\\\\nI1208 00:07:55.094541 6060 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1208 00:07:55.094548 6060 handler.go:208] Removed *v1.Node event handler 7\\\\nI1208 00:07:55.094560 6060 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1208 00:07:55.094565 6060 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1208 00:07:55.094601 6060 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1208 00:07:55.094661 6060 factory.go:656] Stopping watch factory\\\\nI1208 00:07:55.094687 6060 ovnkube.go:599] Stopped ovnkube\\\\nI1208 00:07:55.094692 6060 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1208 00:07:55.094720 6060 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:57Z\\\",\\\"message\\\":\\\"w:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:dce28c51-c9f1-478b-97c8-7e209d6e7cbe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1208 00:07:57.065397 6218 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\
":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:58Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.347956 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.347996 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.348007 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.348023 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.348034 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:58Z","lastTransitionTime":"2025-12-08T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.369606 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:58Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.382983 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:58Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.396037 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:58Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.411680 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:58Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.434456 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:58Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.449783 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"
podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:58Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.450683 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.450718 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.450733 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.450753 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.450768 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:58Z","lastTransitionTime":"2025-12-08T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.553448 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.553504 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.553517 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.553534 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:58 crc kubenswrapper[4745]: I1208 00:07:58.553546 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:58Z","lastTransitionTime":"2025-12-08T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.175064 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovnkube-controller/1.log" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.251855 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:07:59 crc kubenswrapper[4745]: E1208 00:07:59.252068 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.252166 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:59 crc kubenswrapper[4745]: E1208 00:07:59.252263 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.262663 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.262750 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.262778 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.262809 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.262833 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:59Z","lastTransitionTime":"2025-12-08T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.275366 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx"] Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.276312 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.278967 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.279211 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.290212 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:59Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.303125 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:59Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.316858 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:59Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.328373 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:59Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.344901 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:59Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.362066 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://323e5bf56231b60beb6a8fc4dace5c513aabfd00ad4237c0fc1be35bfbd5a385\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 00:07:55.094396 6060 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1208 00:07:55.094439 6060 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1208 00:07:55.094448 6060 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1208 00:07:55.094498 6060 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1208 00:07:55.094525 6060 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1208 00:07:55.094538 6060 handler.go:208] Removed *v1.Node event handler 2\\\\nI1208 00:07:55.094541 6060 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1208 00:07:55.094548 6060 handler.go:208] Removed *v1.Node event handler 7\\\\nI1208 00:07:55.094560 6060 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1208 00:07:55.094565 6060 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1208 00:07:55.094601 6060 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1208 00:07:55.094661 6060 factory.go:656] Stopping watch factory\\\\nI1208 00:07:55.094687 6060 ovnkube.go:599] Stopped ovnkube\\\\nI1208 00:07:55.094692 6060 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1208 00:07:55.094720 6060 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:57Z\\\",\\\"message\\\":\\\"w:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:dce28c51-c9f1-478b-97c8-7e209d6e7cbe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1208 00:07:57.065397 6218 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\
":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:59Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.365756 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.366041 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.366178 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.366297 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.366406 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:59Z","lastTransitionTime":"2025-12-08T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.380662 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:59Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.392797 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:59Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.400686 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/494c0a99-4094-400f-a072-51183fae347d-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-lsqkx\" (UID: \"494c0a99-4094-400f-a072-51183fae347d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.400879 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvntr\" (UniqueName: \"kubernetes.io/projected/494c0a99-4094-400f-a072-51183fae347d-kube-api-access-mvntr\") pod \"ovnkube-control-plane-749d76644c-lsqkx\" (UID: \"494c0a99-4094-400f-a072-51183fae347d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.400985 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/494c0a99-4094-400f-a072-51183fae347d-env-overrides\") pod \"ovnkube-control-plane-749d76644c-lsqkx\" (UID: \"494c0a99-4094-400f-a072-51183fae347d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.401080 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/494c0a99-4094-400f-a072-51183fae347d-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-lsqkx\" (UID: \"494c0a99-4094-400f-a072-51183fae347d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.405816 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:59Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.418578 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:59Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.432990 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-08T00:07:59Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.444781 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:59Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.461199 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:59Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.469298 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.469345 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.469356 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.469375 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.469384 4745 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:59Z","lastTransitionTime":"2025-12-08T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.475835 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:59Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.486876 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:59Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.497020 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:07:59Z is after 2025-08-24T17:21:41Z" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.502647 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/494c0a99-4094-400f-a072-51183fae347d-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-lsqkx\" (UID: \"494c0a99-4094-400f-a072-51183fae347d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.502693 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvntr\" (UniqueName: \"kubernetes.io/projected/494c0a99-4094-400f-a072-51183fae347d-kube-api-access-mvntr\") pod \"ovnkube-control-plane-749d76644c-lsqkx\" (UID: \"494c0a99-4094-400f-a072-51183fae347d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.502716 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/494c0a99-4094-400f-a072-51183fae347d-env-overrides\") pod \"ovnkube-control-plane-749d76644c-lsqkx\" (UID: \"494c0a99-4094-400f-a072-51183fae347d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.502740 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/494c0a99-4094-400f-a072-51183fae347d-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-lsqkx\" (UID: \"494c0a99-4094-400f-a072-51183fae347d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.503713 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/494c0a99-4094-400f-a072-51183fae347d-env-overrides\") pod \"ovnkube-control-plane-749d76644c-lsqkx\" (UID: \"494c0a99-4094-400f-a072-51183fae347d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.503738 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/494c0a99-4094-400f-a072-51183fae347d-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-lsqkx\" (UID: \"494c0a99-4094-400f-a072-51183fae347d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.509272 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/494c0a99-4094-400f-a072-51183fae347d-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-lsqkx\" (UID: \"494c0a99-4094-400f-a072-51183fae347d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.516252 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvntr\" (UniqueName: \"kubernetes.io/projected/494c0a99-4094-400f-a072-51183fae347d-kube-api-access-mvntr\") pod \"ovnkube-control-plane-749d76644c-lsqkx\" (UID: \"494c0a99-4094-400f-a072-51183fae347d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.571845 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.572130 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.572217 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.572303 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.572454 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:59Z","lastTransitionTime":"2025-12-08T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.596252 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.603500 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:07:59 crc kubenswrapper[4745]: E1208 00:07:59.603667 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:08:15.603643945 +0000 UTC m=+51.032850245 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.603737 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.603789 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:07:59 crc kubenswrapper[4745]: E1208 00:07:59.603894 4745 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 00:07:59 crc kubenswrapper[4745]: E1208 00:07:59.603915 4745 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 00:07:59 crc kubenswrapper[4745]: E1208 00:07:59.603977 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 00:08:15.603966363 +0000 UTC m=+51.033172653 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 00:07:59 crc kubenswrapper[4745]: E1208 00:07:59.603992 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 00:08:15.603984764 +0000 UTC m=+51.033191064 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 00:07:59 crc kubenswrapper[4745]: W1208 00:07:59.614324 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod494c0a99_4094_400f_a072_51183fae347d.slice/crio-0a0dc91a4138ff3c3c63c68a2905d4998c3d944b67c9984aa2d5d92ae122f2ed WatchSource:0}: Error finding container 0a0dc91a4138ff3c3c63c68a2905d4998c3d944b67c9984aa2d5d92ae122f2ed: Status 404 returned error can't find the container with id 0a0dc91a4138ff3c3c63c68a2905d4998c3d944b67c9984aa2d5d92ae122f2ed Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.675999 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.676052 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.676067 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.676088 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.676105 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:59Z","lastTransitionTime":"2025-12-08T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.704564 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:07:59 crc kubenswrapper[4745]: E1208 00:07:59.704835 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 00:07:59 crc kubenswrapper[4745]: E1208 00:07:59.704900 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 00:07:59 crc kubenswrapper[4745]: E1208 00:07:59.704926 4745 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:59 crc kubenswrapper[4745]: E1208 00:07:59.705049 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 00:08:15.705023002 +0000 UTC m=+51.134229342 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.779228 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.779273 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.779287 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.779307 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.779320 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:59Z","lastTransitionTime":"2025-12-08T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.805427 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:07:59 crc kubenswrapper[4745]: E1208 00:07:59.805701 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 00:07:59 crc kubenswrapper[4745]: E1208 00:07:59.805758 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 00:07:59 crc kubenswrapper[4745]: E1208 00:07:59.805784 4745 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:59 crc kubenswrapper[4745]: E1208 00:07:59.805891 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 00:08:15.805865495 +0000 UTC m=+51.235071825 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.882173 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:07:59 crc kubenswrapper[4745]: E1208 00:07:59.882387 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.883155 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.883209 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.883228 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.883254 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.883271 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:59Z","lastTransitionTime":"2025-12-08T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.986350 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.986409 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.986427 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.986451 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:07:59 crc kubenswrapper[4745]: I1208 00:07:59.986561 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:07:59Z","lastTransitionTime":"2025-12-08T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.089365 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.089420 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.089437 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.089460 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.089477 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:00Z","lastTransitionTime":"2025-12-08T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.181283 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" event={"ID":"494c0a99-4094-400f-a072-51183fae347d","Type":"ContainerStarted","Data":"0a0dc91a4138ff3c3c63c68a2905d4998c3d944b67c9984aa2d5d92ae122f2ed"} Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.191752 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.191790 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.191798 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.191812 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.191821 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:00Z","lastTransitionTime":"2025-12-08T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.294919 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.295052 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.295142 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.295171 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.295188 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:00Z","lastTransitionTime":"2025-12-08T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.398082 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.398149 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.398166 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.398190 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.398208 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:00Z","lastTransitionTime":"2025-12-08T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.450713 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-zpkz9"] Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.451418 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:00 crc kubenswrapper[4745]: E1208 00:08:00.451510 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.486692 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db40521
7b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.501203 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.501276 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.501294 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.501319 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.501336 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:00Z","lastTransitionTime":"2025-12-08T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.512629 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.513240 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs\") pod \"network-metrics-daemon-zpkz9\" (UID: \"c402d875-2477-4bda-872a-da631b5b5ff7\") " pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.513331 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8g2x\" (UniqueName: \"kubernetes.io/projected/c402d875-2477-4bda-872a-da631b5b5ff7-kube-api-access-g8g2x\") pod \"network-metrics-daemon-zpkz9\" (UID: \"c402d875-2477-4bda-872a-da631b5b5ff7\") " pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.532452 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.554779 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.581565 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.601164 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.604558 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.604622 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.604640 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.604666 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 
00:08:00.604684 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:00Z","lastTransitionTime":"2025-12-08T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.613914 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs\") pod \"network-metrics-daemon-zpkz9\" (UID: \"c402d875-2477-4bda-872a-da631b5b5ff7\") " pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.614013 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8g2x\" (UniqueName: \"kubernetes.io/projected/c402d875-2477-4bda-872a-da631b5b5ff7-kube-api-access-g8g2x\") pod \"network-metrics-daemon-zpkz9\" (UID: \"c402d875-2477-4bda-872a-da631b5b5ff7\") " pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:00 crc kubenswrapper[4745]: E1208 00:08:00.614422 4745 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 00:08:00 crc kubenswrapper[4745]: E1208 00:08:00.614555 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs podName:c402d875-2477-4bda-872a-da631b5b5ff7 nodeName:}" failed. No retries permitted until 2025-12-08 00:08:01.114513577 +0000 UTC m=+36.543719917 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs") pod "network-metrics-daemon-zpkz9" (UID: "c402d875-2477-4bda-872a-da631b5b5ff7") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.633398 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"
,\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.649240 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8g2x\" (UniqueName: \"kubernetes.io/projected/c402d875-2477-4bda-872a-da631b5b5ff7-kube-api-access-g8g2x\") pod \"network-metrics-daemon-zpkz9\" (UID: \"c402d875-2477-4bda-872a-da631b5b5ff7\") " pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.649625 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.665501 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.678436 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.692876 4745 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.706108 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"message\\\":\\\"containers with unready 
status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.709511 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.709610 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.709629 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.709652 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.709665 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:00Z","lastTransitionTime":"2025-12-08T00:08:00Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.723510 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\
\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.742786 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.757433 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.778634 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827bea
ddc3a43a64c81574e1b7b3b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://323e5bf56231b60beb6a8fc4dace5c513aabfd00ad4237c0fc1be35bfbd5a385\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"message\\\":\\\"qos/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 00:07:55.094396 6060 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1208 00:07:55.094439 6060 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1208 00:07:55.094448 6060 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1208 00:07:55.094498 6060 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1208 00:07:55.094525 6060 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1208 00:07:55.094538 6060 handler.go:208] Removed *v1.Node event handler 2\\\\nI1208 00:07:55.094541 6060 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1208 00:07:55.094548 6060 handler.go:208] Removed *v1.Node event handler 7\\\\nI1208 00:07:55.094560 6060 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1208 00:07:55.094565 6060 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1208 00:07:55.094601 6060 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1208 00:07:55.094661 6060 factory.go:656] Stopping watch factory\\\\nI1208 00:07:55.094687 6060 ovnkube.go:599] Stopped ovnkube\\\\nI1208 00:07:55.094692 6060 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1208 00:07:55.094720 6060 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:57Z\\\",\\\"message\\\":\\\"w:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert 
Value:{GoSet:[{GoUUID:dce28c51-c9f1-478b-97c8-7e209d6e7cbe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1208 00:07:57.065397 6218 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/service
account\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.796977 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:00Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.813740 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.813782 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.813791 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.813807 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.813817 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:00Z","lastTransitionTime":"2025-12-08T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.882396 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.882556 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:00 crc kubenswrapper[4745]: E1208 00:08:00.882785 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:00 crc kubenswrapper[4745]: E1208 00:08:00.882901 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.917668 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.917731 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.917750 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.917780 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:00 crc kubenswrapper[4745]: I1208 00:08:00.917799 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:00Z","lastTransitionTime":"2025-12-08T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.020260 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.020334 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.020351 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.020380 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.020401 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:01Z","lastTransitionTime":"2025-12-08T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.120345 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs\") pod \"network-metrics-daemon-zpkz9\" (UID: \"c402d875-2477-4bda-872a-da631b5b5ff7\") " pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:01 crc kubenswrapper[4745]: E1208 00:08:01.120660 4745 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 00:08:01 crc kubenswrapper[4745]: E1208 00:08:01.120778 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs podName:c402d875-2477-4bda-872a-da631b5b5ff7 nodeName:}" failed. No retries permitted until 2025-12-08 00:08:02.120750725 +0000 UTC m=+37.549957055 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs") pod "network-metrics-daemon-zpkz9" (UID: "c402d875-2477-4bda-872a-da631b5b5ff7") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.123439 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.123515 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.123539 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.123570 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.123594 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:01Z","lastTransitionTime":"2025-12-08T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.208235 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.209549 4745 scope.go:117] "RemoveContainer" containerID="5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8" Dec 08 00:08:01 crc kubenswrapper[4745]: E1208 00:08:01.209802 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.226889 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.227001 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.227027 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.227057 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.227080 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:01Z","lastTransitionTime":"2025-12-08T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.227532 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.244919 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.267278 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.289848 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827bea
ddc3a43a64c81574e1b7b3b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:57Z\\\",\\\"message\\\":\\\"w:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:dce28c51-c9f1-478b-97c8-7e209d6e7cbe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1208 00:07:57.065397 6218 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.312543 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.330093 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.330157 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.330175 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.330200 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.330217 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:01Z","lastTransitionTime":"2025-12-08T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.331028 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.352964 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.371887 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc
-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.392392 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022
ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"im
age\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\
\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.407302 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.433792 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.433862 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.433883 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.433910 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.433961 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:01Z","lastTransitionTime":"2025-12-08T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.442120 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.456884 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.473300 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.492760 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.513179 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.529446 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.536850 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.536953 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.536979 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.537011 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.537039 4745 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:01Z","lastTransitionTime":"2025-12-08T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.545151 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.639980 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.640385 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.640412 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.640445 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.640469 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:01Z","lastTransitionTime":"2025-12-08T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.706313 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.734203 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.743735 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.743788 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.743805 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.743836 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.743855 4745 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:01Z","lastTransitionTime":"2025-12-08T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.753222 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.767080 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.782144 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.799302 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.812033 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.826871 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.846622 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.846669 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.846681 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.846697 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.846891 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:01Z","lastTransitionTime":"2025-12-08T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.847219 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.863474 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.882017 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.882143 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:01 crc kubenswrapper[4745]: E1208 00:08:01.882173 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:01 crc kubenswrapper[4745]: E1208 00:08:01.882358 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.899487 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257
453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:57Z\\\",\\\"message\\\":\\\"w:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:dce28c51-c9f1-478b-97c8-7e209d6e7cbe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1208 00:07:57.065397 6218 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.911619 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.928189 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.945993 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.949751 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.949787 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.949796 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.949810 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.949820 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:01Z","lastTransitionTime":"2025-12-08T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.964334 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:01 crc kubenswrapper[4745]: I1208 00:08:01.986871 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.002534 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:01Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.038880 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.052870 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.052923 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.052949 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.052968 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.052980 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:02Z","lastTransitionTime":"2025-12-08T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.137711 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs\") pod \"network-metrics-daemon-zpkz9\" (UID: \"c402d875-2477-4bda-872a-da631b5b5ff7\") " pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:02 crc kubenswrapper[4745]: E1208 00:08:02.138015 4745 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 00:08:02 crc kubenswrapper[4745]: E1208 00:08:02.138217 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs podName:c402d875-2477-4bda-872a-da631b5b5ff7 nodeName:}" failed. No retries permitted until 2025-12-08 00:08:04.138179899 +0000 UTC m=+39.567386379 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs") pod "network-metrics-daemon-zpkz9" (UID: "c402d875-2477-4bda-872a-da631b5b5ff7") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.156040 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.156098 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.156118 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.156145 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.156165 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:02Z","lastTransitionTime":"2025-12-08T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.190133 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" event={"ID":"494c0a99-4094-400f-a072-51183fae347d","Type":"ContainerStarted","Data":"445090ed105e51161484b25ef741b89f4da3976dcad01dad3e73090581353a09"} Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.190195 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" event={"ID":"494c0a99-4094-400f-a072-51183fae347d","Type":"ContainerStarted","Data":"2839dc1946fe59785bb5e76e7e20541f12d4bacce96d9a000e076cf721448682"} Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.211750 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"ima
ge\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.233830 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.253393 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.258780 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.258901 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.258952 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.258988 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.259014 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:02Z","lastTransitionTime":"2025-12-08T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.289362 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:57Z\\\",\\\"message\\\":\\\"w:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:dce28c51-c9f1-478b-97c8-7e209d6e7cbe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1208 00:07:57.065397 6218 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.310308 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.329238 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.350984 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.362304 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.362398 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.362416 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.362440 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.362457 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:02Z","lastTransitionTime":"2025-12-08T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.369804 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.393729 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.409428 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.445631 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.464501 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.464876 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.464967 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.464985 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.465014 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.465031 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:02Z","lastTransitionTime":"2025-12-08T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.484240 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.503209 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.527692 4745 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4
aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.548235 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2839dc1946fe59785bb5e76e7e20541f12d4bacce96d9a000e076cf721448682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://445090ed105e51161484b25ef741b89f4da3976dcad01dad3e73090581353a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 
00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.564220 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:02Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.568344 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.568421 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.568445 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.568473 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.568496 4745 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:02Z","lastTransitionTime":"2025-12-08T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.671147 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.671220 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.671242 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.671272 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.671292 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:02Z","lastTransitionTime":"2025-12-08T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.774545 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.774603 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.774623 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.774650 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.774668 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:02Z","lastTransitionTime":"2025-12-08T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.878201 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.878235 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.878245 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.878262 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.878273 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:02Z","lastTransitionTime":"2025-12-08T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.881959 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.882026 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:02 crc kubenswrapper[4745]: E1208 00:08:02.882128 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:02 crc kubenswrapper[4745]: E1208 00:08:02.882255 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.981690 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.981775 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.981799 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.981830 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:02 crc kubenswrapper[4745]: I1208 00:08:02.981852 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:02Z","lastTransitionTime":"2025-12-08T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.085369 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.085445 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.085470 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.085504 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.085527 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:03Z","lastTransitionTime":"2025-12-08T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.188855 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.188965 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.188993 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.189021 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.189043 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:03Z","lastTransitionTime":"2025-12-08T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.300437 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.300525 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.300551 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.300585 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.300609 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:03Z","lastTransitionTime":"2025-12-08T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.404456 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.404515 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.404532 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.404558 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.404575 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:03Z","lastTransitionTime":"2025-12-08T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.508871 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.508982 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.509002 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.509027 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.509045 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:03Z","lastTransitionTime":"2025-12-08T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.612200 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.612262 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.612281 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.612308 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.612327 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:03Z","lastTransitionTime":"2025-12-08T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.716040 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.716119 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.716139 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.716165 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.716185 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:03Z","lastTransitionTime":"2025-12-08T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.818763 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.818833 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.818856 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.818885 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.818912 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:03Z","lastTransitionTime":"2025-12-08T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.882032 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.882032 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:03 crc kubenswrapper[4745]: E1208 00:08:03.882403 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:03 crc kubenswrapper[4745]: E1208 00:08:03.882515 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.922526 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.922603 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.922629 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.922662 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:03 crc kubenswrapper[4745]: I1208 00:08:03.922684 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:03Z","lastTransitionTime":"2025-12-08T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.025791 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.025882 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.025903 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.025924 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.025977 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:04Z","lastTransitionTime":"2025-12-08T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.129654 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.129706 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.129724 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.129747 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.129764 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:04Z","lastTransitionTime":"2025-12-08T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.163588 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs\") pod \"network-metrics-daemon-zpkz9\" (UID: \"c402d875-2477-4bda-872a-da631b5b5ff7\") " pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:04 crc kubenswrapper[4745]: E1208 00:08:04.163804 4745 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 00:08:04 crc kubenswrapper[4745]: E1208 00:08:04.163985 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs podName:c402d875-2477-4bda-872a-da631b5b5ff7 nodeName:}" failed. No retries permitted until 2025-12-08 00:08:08.163903028 +0000 UTC m=+43.593109368 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs") pod "network-metrics-daemon-zpkz9" (UID: "c402d875-2477-4bda-872a-da631b5b5ff7") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.232111 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.232190 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.232206 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.232238 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.232258 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:04Z","lastTransitionTime":"2025-12-08T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.335661 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.335746 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.335766 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.335833 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.335852 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:04Z","lastTransitionTime":"2025-12-08T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.439668 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.439746 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.439759 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.439782 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.439799 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:04Z","lastTransitionTime":"2025-12-08T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.543386 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.543429 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.543438 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.543451 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.543460 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:04Z","lastTransitionTime":"2025-12-08T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.646321 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.646386 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.646408 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.646436 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.646456 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:04Z","lastTransitionTime":"2025-12-08T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.748868 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.748916 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.748950 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.748969 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.748983 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:04Z","lastTransitionTime":"2025-12-08T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.852026 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.852079 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.852096 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.852119 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.852137 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:04Z","lastTransitionTime":"2025-12-08T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.881739 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:04 crc kubenswrapper[4745]: E1208 00:08:04.882008 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.882134 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:04 crc kubenswrapper[4745]: E1208 00:08:04.882361 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.914967 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53
b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshif
t-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:04Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.936852 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:04Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.954890 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.955151 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.955320 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.955487 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.955637 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:04Z","lastTransitionTime":"2025-12-08T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.959101 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:04Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:04 crc kubenswrapper[4745]: I1208 00:08:04.981539 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:04Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.010889 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-08T00:08:05Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.029905 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:05Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.053164 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:05Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.058946 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.058999 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.059018 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.059046 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.059070 4745 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:05Z","lastTransitionTime":"2025-12-08T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.070299 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:05Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.091612 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:05Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.106822 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:05Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.119332 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:05Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.133788 4745 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2839dc1946fe59785bb5e76e7e20541f12d4bacce96d9a000e076cf721448682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://445090ed105e51161484b25ef741b89f4da3976dcad01dad3e73090581353a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-08T00:08:05Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.150657 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/s
tatic-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:05Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.162989 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.163047 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.163062 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.163086 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.163102 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:05Z","lastTransitionTime":"2025-12-08T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.173406 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:05Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.192157 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:05Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.222424 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827bea
ddc3a43a64c81574e1b7b3b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:57Z\\\",\\\"message\\\":\\\"w:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:dce28c51-c9f1-478b-97c8-7e209d6e7cbe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1208 00:07:57.065397 6218 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:05Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.238613 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:05Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.267063 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.267103 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.267115 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.267133 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.267148 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:05Z","lastTransitionTime":"2025-12-08T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.370851 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.370913 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.371361 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.371399 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.371417 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:05Z","lastTransitionTime":"2025-12-08T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.474983 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.475044 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.475062 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.475093 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.475113 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:05Z","lastTransitionTime":"2025-12-08T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.579552 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.579622 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.579642 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.579668 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.579685 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:05Z","lastTransitionTime":"2025-12-08T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.683108 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.683173 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.683195 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.683226 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.683272 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:05Z","lastTransitionTime":"2025-12-08T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.786416 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.786478 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.786501 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.786523 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.786541 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:05Z","lastTransitionTime":"2025-12-08T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.881701 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.881728 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:05 crc kubenswrapper[4745]: E1208 00:08:05.881859 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:05 crc kubenswrapper[4745]: E1208 00:08:05.882258 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.890124 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.890198 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.890228 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.890262 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.890288 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:05Z","lastTransitionTime":"2025-12-08T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.993485 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.993545 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.993563 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.993587 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:05 crc kubenswrapper[4745]: I1208 00:08:05.993608 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:05Z","lastTransitionTime":"2025-12-08T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.095836 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.095892 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.095908 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.095981 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.096019 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:06Z","lastTransitionTime":"2025-12-08T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.199327 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.199381 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.199394 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.199410 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.199422 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:06Z","lastTransitionTime":"2025-12-08T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.302516 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.302587 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.302632 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.302663 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.302682 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:06Z","lastTransitionTime":"2025-12-08T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.405738 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.405808 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.405826 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.405850 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.405869 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:06Z","lastTransitionTime":"2025-12-08T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.509858 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.509909 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.509921 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.509964 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.509979 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:06Z","lastTransitionTime":"2025-12-08T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.612396 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.612447 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.612458 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.612476 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.612491 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:06Z","lastTransitionTime":"2025-12-08T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.715731 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.715795 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.715816 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.715844 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.715862 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:06Z","lastTransitionTime":"2025-12-08T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.818783 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.818855 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.818874 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.818901 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.818919 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:06Z","lastTransitionTime":"2025-12-08T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.882282 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:06 crc kubenswrapper[4745]: E1208 00:08:06.882490 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.882637 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:06 crc kubenswrapper[4745]: E1208 00:08:06.882905 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.916746 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.916805 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.916817 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.916836 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.916850 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:06Z","lastTransitionTime":"2025-12-08T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:06 crc kubenswrapper[4745]: E1208 00:08:06.935266 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeByt
es\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:06Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.941238 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.941280 4745 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.941296 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.941312 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.941324 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:06Z","lastTransitionTime":"2025-12-08T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:06 crc kubenswrapper[4745]: E1208 00:08:06.960366 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:06Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.965094 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.965143 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.965155 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.965172 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.965189 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:06Z","lastTransitionTime":"2025-12-08T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:06 crc kubenswrapper[4745]: E1208 00:08:06.983447 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:06Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.987530 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.987576 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.987590 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.987610 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:06 crc kubenswrapper[4745]: I1208 00:08:06.987625 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:06Z","lastTransitionTime":"2025-12-08T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:07 crc kubenswrapper[4745]: E1208 00:08:07.007465 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:07Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.012693 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.012781 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.012809 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.012842 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.012868 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:07Z","lastTransitionTime":"2025-12-08T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:07 crc kubenswrapper[4745]: E1208 00:08:07.032034 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:07Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:07 crc kubenswrapper[4745]: E1208 00:08:07.032187 4745 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.034398 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.034434 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.034447 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.034469 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.034485 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:07Z","lastTransitionTime":"2025-12-08T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.137595 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.137657 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.137679 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.137705 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.137724 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:07Z","lastTransitionTime":"2025-12-08T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.240347 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.240415 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.240434 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.240460 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.240479 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:07Z","lastTransitionTime":"2025-12-08T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.343667 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.343731 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.343754 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.343786 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.343810 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:07Z","lastTransitionTime":"2025-12-08T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.447116 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.447176 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.447193 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.447217 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.447235 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:07Z","lastTransitionTime":"2025-12-08T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.550374 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.550428 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.550444 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.550470 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.550490 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:07Z","lastTransitionTime":"2025-12-08T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.653129 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.653192 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.653210 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.653235 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.653256 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:07Z","lastTransitionTime":"2025-12-08T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.756138 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.756200 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.756217 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.756241 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.756330 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:07Z","lastTransitionTime":"2025-12-08T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.859609 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.859676 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.859693 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.859725 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.859743 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:07Z","lastTransitionTime":"2025-12-08T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.882695 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.882710 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:07 crc kubenswrapper[4745]: E1208 00:08:07.882953 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:07 crc kubenswrapper[4745]: E1208 00:08:07.883137 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.963218 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.963287 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.963306 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.963331 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:07 crc kubenswrapper[4745]: I1208 00:08:07.963348 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:07Z","lastTransitionTime":"2025-12-08T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.066641 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.066706 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.066723 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.066747 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.066768 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:08Z","lastTransitionTime":"2025-12-08T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.170088 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.170165 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.170188 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.170218 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.170239 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:08Z","lastTransitionTime":"2025-12-08T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.212426 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs\") pod \"network-metrics-daemon-zpkz9\" (UID: \"c402d875-2477-4bda-872a-da631b5b5ff7\") " pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:08 crc kubenswrapper[4745]: E1208 00:08:08.212698 4745 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 00:08:08 crc kubenswrapper[4745]: E1208 00:08:08.212797 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs podName:c402d875-2477-4bda-872a-da631b5b5ff7 nodeName:}" failed. No retries permitted until 2025-12-08 00:08:16.212767392 +0000 UTC m=+51.641973722 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs") pod "network-metrics-daemon-zpkz9" (UID: "c402d875-2477-4bda-872a-da631b5b5ff7") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.274061 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.274132 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.274149 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.274177 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.274196 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:08Z","lastTransitionTime":"2025-12-08T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.379168 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.379231 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.379248 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.379281 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.379303 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:08Z","lastTransitionTime":"2025-12-08T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.482285 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.482347 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.482365 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.482385 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.482402 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:08Z","lastTransitionTime":"2025-12-08T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.585892 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.585992 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.586018 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.586103 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.586128 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:08Z","lastTransitionTime":"2025-12-08T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.689481 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.689531 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.689548 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.689569 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.689586 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:08Z","lastTransitionTime":"2025-12-08T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.792690 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.792763 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.792787 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.792817 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.792840 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:08Z","lastTransitionTime":"2025-12-08T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.881796 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.881913 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:08 crc kubenswrapper[4745]: E1208 00:08:08.882095 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:08 crc kubenswrapper[4745]: E1208 00:08:08.882266 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.895266 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.895329 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.895374 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.895399 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.895418 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:08Z","lastTransitionTime":"2025-12-08T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.998672 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.998737 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.998756 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.998781 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:08 crc kubenswrapper[4745]: I1208 00:08:08.998801 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:08Z","lastTransitionTime":"2025-12-08T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.102295 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.102350 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.102366 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.102390 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.102408 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:09Z","lastTransitionTime":"2025-12-08T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.206301 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.206371 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.206397 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.206429 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.206453 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:09Z","lastTransitionTime":"2025-12-08T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.309665 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.309720 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.309740 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.309769 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.309789 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:09Z","lastTransitionTime":"2025-12-08T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.412470 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.412532 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.412554 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.412583 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.412602 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:09Z","lastTransitionTime":"2025-12-08T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.515621 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.515671 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.515683 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.515700 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.515711 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:09Z","lastTransitionTime":"2025-12-08T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.618249 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.618297 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.618308 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.618327 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.618339 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:09Z","lastTransitionTime":"2025-12-08T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.720391 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.720455 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.720472 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.720497 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.720519 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:09Z","lastTransitionTime":"2025-12-08T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.822252 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.822707 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.822731 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.822789 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.822806 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:09Z","lastTransitionTime":"2025-12-08T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.882207 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.882244 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:09 crc kubenswrapper[4745]: E1208 00:08:09.882412 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:09 crc kubenswrapper[4745]: E1208 00:08:09.882546 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.925907 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.926011 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.926036 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.926067 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:09 crc kubenswrapper[4745]: I1208 00:08:09.926091 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:09Z","lastTransitionTime":"2025-12-08T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.028977 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.029014 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.029022 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.029037 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.029046 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:10Z","lastTransitionTime":"2025-12-08T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.131178 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.131239 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.131255 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.131279 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.131298 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:10Z","lastTransitionTime":"2025-12-08T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.234397 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.234496 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.234516 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.234541 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.234558 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:10Z","lastTransitionTime":"2025-12-08T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.337051 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.337112 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.337128 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.337155 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.337172 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:10Z","lastTransitionTime":"2025-12-08T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.440120 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.440162 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.440174 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.440191 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.440202 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:10Z","lastTransitionTime":"2025-12-08T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.543156 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.543214 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.543230 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.543254 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.543270 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:10Z","lastTransitionTime":"2025-12-08T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.646538 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.646610 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.646634 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.646661 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.646679 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:10Z","lastTransitionTime":"2025-12-08T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.749461 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.749518 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.749534 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.749560 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.749577 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:10Z","lastTransitionTime":"2025-12-08T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.852066 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.852145 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.852173 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.852204 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.852227 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:10Z","lastTransitionTime":"2025-12-08T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.881803 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.881906 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:10 crc kubenswrapper[4745]: E1208 00:08:10.882057 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:10 crc kubenswrapper[4745]: E1208 00:08:10.882295 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.955543 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.955633 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.955659 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.955691 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:10 crc kubenswrapper[4745]: I1208 00:08:10.955714 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:10Z","lastTransitionTime":"2025-12-08T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.058963 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.059035 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.059055 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.059081 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.059099 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:11Z","lastTransitionTime":"2025-12-08T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.161686 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.161749 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.161776 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.161803 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.161823 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:11Z","lastTransitionTime":"2025-12-08T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.264278 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.264328 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.264349 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.264376 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.264397 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:11Z","lastTransitionTime":"2025-12-08T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.367252 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.367320 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.367342 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.367365 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.367383 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:11Z","lastTransitionTime":"2025-12-08T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.470510 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.470557 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.470568 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.470587 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.470602 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:11Z","lastTransitionTime":"2025-12-08T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.573146 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.573180 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.573190 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.573209 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.573220 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:11Z","lastTransitionTime":"2025-12-08T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.675989 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.676056 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.676080 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.676109 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.676131 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:11Z","lastTransitionTime":"2025-12-08T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.778515 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.778601 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.778624 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.778652 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.778669 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:11Z","lastTransitionTime":"2025-12-08T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.880680 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.880725 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.880743 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.880762 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.880774 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:11Z","lastTransitionTime":"2025-12-08T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.882079 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.882123 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:11 crc kubenswrapper[4745]: E1208 00:08:11.882206 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:11 crc kubenswrapper[4745]: E1208 00:08:11.882305 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.983885 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.983977 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.983995 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.984019 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:11 crc kubenswrapper[4745]: I1208 00:08:11.984034 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:11Z","lastTransitionTime":"2025-12-08T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.086856 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.086980 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.087000 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.087022 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.087039 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:12Z","lastTransitionTime":"2025-12-08T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.189812 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.189874 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.189890 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.189916 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.189958 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:12Z","lastTransitionTime":"2025-12-08T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.292716 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.292822 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.292842 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.292869 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.292887 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:12Z","lastTransitionTime":"2025-12-08T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.396792 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.396887 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.396917 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.396998 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.397024 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:12Z","lastTransitionTime":"2025-12-08T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.499737 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.499799 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.499816 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.499842 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.499864 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:12Z","lastTransitionTime":"2025-12-08T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.602919 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.603030 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.603058 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.603136 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.603163 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:12Z","lastTransitionTime":"2025-12-08T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.707106 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.707185 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.707213 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.707243 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.707265 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:12Z","lastTransitionTime":"2025-12-08T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.810760 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.810832 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.810854 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.810883 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.810906 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:12Z","lastTransitionTime":"2025-12-08T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.882176 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.882306 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:12 crc kubenswrapper[4745]: E1208 00:08:12.882365 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:12 crc kubenswrapper[4745]: E1208 00:08:12.882491 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.913456 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.913489 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.913499 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.913514 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:12 crc kubenswrapper[4745]: I1208 00:08:12.913528 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:12Z","lastTransitionTime":"2025-12-08T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.017213 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.017274 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.017292 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.017317 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.017336 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:13Z","lastTransitionTime":"2025-12-08T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.120694 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.120768 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.120790 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.120814 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.120831 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:13Z","lastTransitionTime":"2025-12-08T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.223639 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.223717 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.223741 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.223771 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.223794 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:13Z","lastTransitionTime":"2025-12-08T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.326845 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.326953 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.326973 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.326997 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.327014 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:13Z","lastTransitionTime":"2025-12-08T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.429847 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.429914 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.429961 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.429987 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.430004 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:13Z","lastTransitionTime":"2025-12-08T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.533335 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.533431 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.533455 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.533484 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.533506 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:13Z","lastTransitionTime":"2025-12-08T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.636412 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.636475 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.636494 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.636520 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.636537 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:13Z","lastTransitionTime":"2025-12-08T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.739110 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.739172 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.739189 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.739212 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.739230 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:13Z","lastTransitionTime":"2025-12-08T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.842139 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.842195 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.842212 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.842234 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.842251 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:13Z","lastTransitionTime":"2025-12-08T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.882274 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.882276 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:13 crc kubenswrapper[4745]: E1208 00:08:13.882457 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:13 crc kubenswrapper[4745]: E1208 00:08:13.882577 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.946406 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.946525 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.946547 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.946580 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:13 crc kubenswrapper[4745]: I1208 00:08:13.946603 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:13Z","lastTransitionTime":"2025-12-08T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.049481 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.049578 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.049597 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.049619 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.049637 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:14Z","lastTransitionTime":"2025-12-08T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.153068 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.153137 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.153159 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.153190 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.153214 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:14Z","lastTransitionTime":"2025-12-08T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.256235 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.256296 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.256313 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.256336 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.256353 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:14Z","lastTransitionTime":"2025-12-08T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.359300 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.359357 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.359375 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.359398 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.359416 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:14Z","lastTransitionTime":"2025-12-08T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.462385 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.462433 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.462449 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.462474 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.462490 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:14Z","lastTransitionTime":"2025-12-08T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.566155 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.566213 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.566230 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.566255 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.566276 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:14Z","lastTransitionTime":"2025-12-08T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.669316 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.669367 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.669382 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.669404 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.669422 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:14Z","lastTransitionTime":"2025-12-08T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.772792 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.773041 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.773074 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.773106 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.773127 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:14Z","lastTransitionTime":"2025-12-08T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.876515 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.876568 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.876590 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.876620 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.876642 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:14Z","lastTransitionTime":"2025-12-08T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.882395 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.882532 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:14 crc kubenswrapper[4745]: E1208 00:08:14.882766 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:14 crc kubenswrapper[4745]: E1208 00:08:14.883036 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.884137 4745 scope.go:117] "RemoveContainer" containerID="5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.936668 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827bea
ddc3a43a64c81574e1b7b3b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:57Z\\\",\\\"message\\\":\\\"w:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:dce28c51-c9f1-478b-97c8-7e209d6e7cbe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1208 00:07:57.065397 6218 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:14Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.965164 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:14Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.978151 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.978372 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.978389 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.978407 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.978419 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:14Z","lastTransitionTime":"2025-12-08T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:14 crc kubenswrapper[4745]: I1208 00:08:14.984098 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:14Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.000658 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:14Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.011661 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.026762 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.037559 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.060326 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.072288 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.081125 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.081166 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.081176 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.081190 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.081200 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:15Z","lastTransitionTime":"2025-12-08T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.087980 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.103291 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.114902 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.128865 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.141354 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.152126 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.160699 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.170080 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2839dc1946fe59785bb5e76e7e20541f12d4bacce96d9a000e076cf721448682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://445090ed105e51161484b25ef741b89f4da3976dcad01dad3e73090581353a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 
00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.183864 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.183906 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.183917 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.183954 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.183966 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:15Z","lastTransitionTime":"2025-12-08T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.238839 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovnkube-controller/1.log" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.242781 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerStarted","Data":"f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933"} Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.243386 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.262737 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.281150 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.286431 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.286479 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.286499 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.286522 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.286538 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:15Z","lastTransitionTime":"2025-12-08T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.294506 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.324356 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\
",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:57Z\\\",\\\"message\\\":\\\"w:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:dce28c51-c9f1-478b-97c8-7e209d6e7cbe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1208 00:07:57.065397 6218 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.340651 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.369211 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.389304 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.389391 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.389405 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.389425 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.389439 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:15Z","lastTransitionTime":"2025-12-08T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.392001 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.414116 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.434982 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.454773 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.470521 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.485814 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.491071 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.491110 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.491125 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.491141 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.491150 4745 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:15Z","lastTransitionTime":"2025-12-08T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.500177 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.516523 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.529024 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.541971 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.556819 4745 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2839dc1946fe59785bb5e76e7e20541f12d4bacce96d9a000e076cf721448682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://445090ed105e51161484b25ef741b89f4da3976dcad01dad3e73090581353a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-08T00:08:15Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.594477 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.594518 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.594529 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.594545 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.594557 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:15Z","lastTransitionTime":"2025-12-08T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.694326 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.694484 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.694604 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:15 crc kubenswrapper[4745]: E1208 00:08:15.694710 4745 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 00:08:15 crc kubenswrapper[4745]: E1208 00:08:15.694821 4745 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 00:08:15 crc kubenswrapper[4745]: E1208 00:08:15.694840 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 00:08:47.694813026 +0000 UTC m=+83.124019356 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 00:08:15 crc kubenswrapper[4745]: E1208 00:08:15.694984 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 00:08:47.69495456 +0000 UTC m=+83.124160860 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 00:08:15 crc kubenswrapper[4745]: E1208 00:08:15.695530 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:08:47.695510334 +0000 UTC m=+83.124716674 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.696501 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.696558 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.696581 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.696609 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.696648 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:15Z","lastTransitionTime":"2025-12-08T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.795704 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:15 crc kubenswrapper[4745]: E1208 00:08:15.796021 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 00:08:15 crc kubenswrapper[4745]: E1208 00:08:15.796057 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 00:08:15 crc kubenswrapper[4745]: E1208 00:08:15.796082 4745 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:08:15 crc kubenswrapper[4745]: E1208 00:08:15.796167 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 00:08:47.796139282 +0000 UTC m=+83.225345622 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.799615 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.799670 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.799693 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.799724 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.799746 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:15Z","lastTransitionTime":"2025-12-08T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.882418 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.882521 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:15 crc kubenswrapper[4745]: E1208 00:08:15.882613 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:15 crc kubenswrapper[4745]: E1208 00:08:15.882733 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.896832 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:15 crc kubenswrapper[4745]: E1208 00:08:15.897073 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 00:08:15 crc kubenswrapper[4745]: E1208 00:08:15.897131 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 00:08:15 crc kubenswrapper[4745]: E1208 00:08:15.897154 4745 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:08:15 crc kubenswrapper[4745]: E1208 00:08:15.897236 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 00:08:47.897212811 +0000 UTC m=+83.326419141 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.902977 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.903028 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.903047 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.903074 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:15 crc kubenswrapper[4745]: I1208 00:08:15.903097 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:15Z","lastTransitionTime":"2025-12-08T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.005660 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.005719 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.005736 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.005761 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.005781 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:16Z","lastTransitionTime":"2025-12-08T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.108163 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.108231 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.108250 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.108279 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.108298 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:16Z","lastTransitionTime":"2025-12-08T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.211078 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.211145 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.211163 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.211187 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.211205 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:16Z","lastTransitionTime":"2025-12-08T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.302780 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs\") pod \"network-metrics-daemon-zpkz9\" (UID: \"c402d875-2477-4bda-872a-da631b5b5ff7\") " pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:16 crc kubenswrapper[4745]: E1208 00:08:16.303214 4745 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 00:08:16 crc kubenswrapper[4745]: E1208 00:08:16.303979 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs podName:c402d875-2477-4bda-872a-da631b5b5ff7 nodeName:}" failed. No retries permitted until 2025-12-08 00:08:32.303329414 +0000 UTC m=+67.732535744 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs") pod "network-metrics-daemon-zpkz9" (UID: "c402d875-2477-4bda-872a-da631b5b5ff7") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.315860 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.316001 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.316032 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.316063 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.316087 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:16Z","lastTransitionTime":"2025-12-08T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.419069 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.419131 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.419156 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.419188 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.419209 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:16Z","lastTransitionTime":"2025-12-08T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.522504 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.522569 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.522585 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.522609 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.522627 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:16Z","lastTransitionTime":"2025-12-08T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.625251 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.625303 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.625320 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.625343 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.625360 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:16Z","lastTransitionTime":"2025-12-08T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.728083 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.728196 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.728218 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.728242 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.728261 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:16Z","lastTransitionTime":"2025-12-08T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.831233 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.831290 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.831309 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.831333 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.831350 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:16Z","lastTransitionTime":"2025-12-08T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.882077 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.882172 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:16 crc kubenswrapper[4745]: E1208 00:08:16.882260 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:16 crc kubenswrapper[4745]: E1208 00:08:16.882418 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.934797 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.934871 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.934893 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.934920 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:16 crc kubenswrapper[4745]: I1208 00:08:16.934974 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:16Z","lastTransitionTime":"2025-12-08T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.038540 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.038613 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.038635 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.038663 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.038682 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:17Z","lastTransitionTime":"2025-12-08T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.129876 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.130023 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.130042 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.130069 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.130087 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:17Z","lastTransitionTime":"2025-12-08T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:17 crc kubenswrapper[4745]: E1208 00:08:17.152226 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.157685 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.157747 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.157765 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.157788 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.157804 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:17Z","lastTransitionTime":"2025-12-08T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:17 crc kubenswrapper[4745]: E1208 00:08:17.181433 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.186118 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.186166 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.186182 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.186203 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.186217 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:17Z","lastTransitionTime":"2025-12-08T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:17 crc kubenswrapper[4745]: E1208 00:08:17.206558 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.212389 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.212474 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.212501 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.212534 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.212566 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:17Z","lastTransitionTime":"2025-12-08T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:17 crc kubenswrapper[4745]: E1208 00:08:17.237147 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.242698 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.242768 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.242794 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.242825 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.242848 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:17Z","lastTransitionTime":"2025-12-08T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.253464 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovnkube-controller/2.log" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.254403 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovnkube-controller/1.log" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.259356 4745 generic.go:334] "Generic (PLEG): container finished" podID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerID="f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933" exitCode=1 Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.259410 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerDied","Data":"f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933"} Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.259488 4745 scope.go:117] "RemoveContainer" containerID="5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.260632 4745 scope.go:117] "RemoveContainer" containerID="f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933" Dec 08 00:08:17 crc kubenswrapper[4745]: E1208 00:08:17.260971 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" Dec 08 00:08:17 crc kubenswrapper[4745]: E1208 00:08:17.264567 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: E1208 00:08:17.265150 4745 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.271363 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.271426 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.271450 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.271479 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.271501 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:17Z","lastTransitionTime":"2025-12-08T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.289874 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.306711 4745 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.340121 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.359849 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.374327 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.374398 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.374422 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.374456 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.374480 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:17Z","lastTransitionTime":"2025-12-08T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.382837 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.404803 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.423165 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.444041 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.462752 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.477526 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.477829 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.478058 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.478244 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.478423 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:17Z","lastTransitionTime":"2025-12-08T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.485735 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.500522 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.515537 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2839dc1946fe59785bb5e76e7e20541f12d4bacce96d9a000e076cf721448682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://445090ed105e51161484b25ef741b89f4da3976dcad01dad3e73090581353a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 
00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.548873 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f
61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:57Z\\\",\\\"message\\\":\\\"w:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:dce28c51-c9f1-478b-97c8-7e209d6e7cbe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1208 00:07:57.065397 6218 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:16Z\\\",\\\"message\\\":\\\"ions:[]Condition{},},}\\\\nI1208 00:08:15.920310 6428 services_controller.go:451] Built service openshift-multus/multus-admission-controller cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-multus/multus-admission-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1208 00:08:15.920266 6428 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/control-plane-machine-set-operator]} name:Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none 
reject\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.566553 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.581776 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.581836 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.581853 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.581880 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.581898 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:17Z","lastTransitionTime":"2025-12-08T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.586236 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.605445 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.622083 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:17Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.684309 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.684351 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.684363 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.684379 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.684393 4745 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:17Z","lastTransitionTime":"2025-12-08T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.787201 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.787241 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.787248 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.787266 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.787276 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:17Z","lastTransitionTime":"2025-12-08T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.882486 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.882499 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:17 crc kubenswrapper[4745]: E1208 00:08:17.883692 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:17 crc kubenswrapper[4745]: E1208 00:08:17.883696 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.891018 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.891247 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.891481 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.891686 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.891856 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:17Z","lastTransitionTime":"2025-12-08T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.995145 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.995189 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.995205 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.995232 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:17 crc kubenswrapper[4745]: I1208 00:08:17.995251 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:17Z","lastTransitionTime":"2025-12-08T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.098811 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.098874 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.098892 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.098917 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.098979 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:18Z","lastTransitionTime":"2025-12-08T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.202220 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.202546 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.202562 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.202589 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.202606 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:18Z","lastTransitionTime":"2025-12-08T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.265845 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovnkube-controller/2.log" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.305815 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.305865 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.305881 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.305903 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.305920 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:18Z","lastTransitionTime":"2025-12-08T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.409360 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.409512 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.409531 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.409554 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.409571 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:18Z","lastTransitionTime":"2025-12-08T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.512491 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.512555 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.512579 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.512608 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.512662 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:18Z","lastTransitionTime":"2025-12-08T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.615803 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.615849 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.615865 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.615888 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.615904 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:18Z","lastTransitionTime":"2025-12-08T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.675077 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.689914 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.694671 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\
\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:18Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.718281 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crco
nt/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:18Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.720230 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.720305 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.720330 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.720361 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.720383 4745 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:18Z","lastTransitionTime":"2025-12-08T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.739614 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:18Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.760637 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:18Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.776702 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:18Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.796658 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2839dc1946fe59785bb5e76e7e20541f12d4bacce96d9a000e076cf721448682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://445090ed105e51161484b25ef741b89f4da3976dcad01dad3e73090581353a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:18Z is after 2025-08-24T17:21:41Z" Dec 08 
00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.823792 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.823851 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.823869 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.823894 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.823914 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:18Z","lastTransitionTime":"2025-12-08T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.830331 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067
af802c6265d1a27464d21933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:57Z\\\",\\\"message\\\":\\\"w:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:dce28c51-c9f1-478b-97c8-7e209d6e7cbe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1208 00:07:57.065397 6218 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:16Z\\\",\\\"message\\\":\\\"ions:[]Condition{},},}\\\\nI1208 00:08:15.920310 6428 services_controller.go:451] Built service openshift-multus/multus-admission-controller cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-multus/multus-admission-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, 
Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1208 00:08:15.920266 6428 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/control-plane-machine-set-operator]} name:Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"
mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:18Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.850250 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:18Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.870838 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:18Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.881739 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:18 crc kubenswrapper[4745]: E1208 00:08:18.882021 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.882202 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:18 crc kubenswrapper[4745]: E1208 00:08:18.882482 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.892536 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:18Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.909041 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:18Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.927076 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.927156 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.927183 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.927252 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.927278 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:18Z","lastTransitionTime":"2025-12-08T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.931470 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:18Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.946593 4745 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:18Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.979068 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:18Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:18 crc kubenswrapper[4745]: I1208 00:08:18.997102 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:18Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.014413 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:19Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.030849 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.030911 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.030956 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.030981 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.030999 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:19Z","lastTransitionTime":"2025-12-08T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.035706 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:19Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.134275 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.134353 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.134378 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.134408 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.134433 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:19Z","lastTransitionTime":"2025-12-08T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.236487 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.236535 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.236550 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.236567 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.236579 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:19Z","lastTransitionTime":"2025-12-08T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.339387 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.339424 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.339435 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.339451 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.339463 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:19Z","lastTransitionTime":"2025-12-08T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.442467 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.442517 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.442528 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.442547 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.442559 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:19Z","lastTransitionTime":"2025-12-08T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.546078 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.546164 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.546183 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.546709 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.546771 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:19Z","lastTransitionTime":"2025-12-08T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.651463 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.651517 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.651536 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.651555 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.651570 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:19Z","lastTransitionTime":"2025-12-08T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.754125 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.754172 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.754184 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.754201 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.754212 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:19Z","lastTransitionTime":"2025-12-08T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.858026 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.858098 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.858116 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.858146 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.858193 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:19Z","lastTransitionTime":"2025-12-08T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.882489 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.882489 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:19 crc kubenswrapper[4745]: E1208 00:08:19.882683 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:19 crc kubenswrapper[4745]: E1208 00:08:19.882920 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.961886 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.962007 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.962036 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.962069 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:19 crc kubenswrapper[4745]: I1208 00:08:19.962093 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:19Z","lastTransitionTime":"2025-12-08T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.064800 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.064883 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.064921 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.064987 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.065028 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:20Z","lastTransitionTime":"2025-12-08T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.167884 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.167981 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.168001 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.168028 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.168046 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:20Z","lastTransitionTime":"2025-12-08T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.270893 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.271048 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.271114 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.271146 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.271206 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:20Z","lastTransitionTime":"2025-12-08T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.374445 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.374495 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.374511 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.374534 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.374551 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:20Z","lastTransitionTime":"2025-12-08T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.477466 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.477561 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.477602 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.477635 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.477656 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:20Z","lastTransitionTime":"2025-12-08T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.581350 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.581460 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.581490 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.581520 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.581540 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:20Z","lastTransitionTime":"2025-12-08T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.684908 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.685023 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.685041 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.685072 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.685099 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:20Z","lastTransitionTime":"2025-12-08T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.788751 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.788798 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.788815 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.788840 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.788857 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:20Z","lastTransitionTime":"2025-12-08T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.882264 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.882295 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:20 crc kubenswrapper[4745]: E1208 00:08:20.882489 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:20 crc kubenswrapper[4745]: E1208 00:08:20.882662 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.891282 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.891347 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.891371 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.891401 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.891422 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:20Z","lastTransitionTime":"2025-12-08T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.994062 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.994106 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.994116 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.994132 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:20 crc kubenswrapper[4745]: I1208 00:08:20.994143 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:20Z","lastTransitionTime":"2025-12-08T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.097509 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.097565 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.097581 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.097604 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.097621 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:21Z","lastTransitionTime":"2025-12-08T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.200626 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.200730 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.200748 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.200772 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.200789 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:21Z","lastTransitionTime":"2025-12-08T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.303475 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.303526 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.303543 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.303567 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.303584 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:21Z","lastTransitionTime":"2025-12-08T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.406605 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.406681 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.406706 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.406736 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.406757 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:21Z","lastTransitionTime":"2025-12-08T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.509643 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.509701 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.509720 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.509744 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.509764 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:21Z","lastTransitionTime":"2025-12-08T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.613130 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.613209 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.613229 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.613254 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.613272 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:21Z","lastTransitionTime":"2025-12-08T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.716893 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.716991 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.717015 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.717043 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.717062 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:21Z","lastTransitionTime":"2025-12-08T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.819814 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.819877 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.819900 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.819964 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.819990 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:21Z","lastTransitionTime":"2025-12-08T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.881771 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.881777 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:21 crc kubenswrapper[4745]: E1208 00:08:21.882005 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:21 crc kubenswrapper[4745]: E1208 00:08:21.882144 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.923016 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.923073 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.923085 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.923104 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:21 crc kubenswrapper[4745]: I1208 00:08:21.923116 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:21Z","lastTransitionTime":"2025-12-08T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.025977 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.026025 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.026035 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.026055 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.026067 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:22Z","lastTransitionTime":"2025-12-08T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.131165 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.131258 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.131311 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.131350 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.131370 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:22Z","lastTransitionTime":"2025-12-08T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.234144 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.234180 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.234189 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.234204 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.234215 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:22Z","lastTransitionTime":"2025-12-08T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.337389 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.337451 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.337472 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.337500 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.337522 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:22Z","lastTransitionTime":"2025-12-08T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.440206 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.440262 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.440283 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.440303 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.440316 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:22Z","lastTransitionTime":"2025-12-08T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.543815 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.543892 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.543909 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.543972 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.543997 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:22Z","lastTransitionTime":"2025-12-08T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.646901 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.647000 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.647018 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.647044 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.647061 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:22Z","lastTransitionTime":"2025-12-08T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.750679 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.750765 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.750790 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.750823 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.750848 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:22Z","lastTransitionTime":"2025-12-08T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.854011 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.854097 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.854124 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.854159 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.854187 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:22Z","lastTransitionTime":"2025-12-08T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.881885 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:22 crc kubenswrapper[4745]: E1208 00:08:22.882102 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.882237 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:22 crc kubenswrapper[4745]: E1208 00:08:22.882407 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.957140 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.957192 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.957211 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.957236 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:22 crc kubenswrapper[4745]: I1208 00:08:22.957255 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:22Z","lastTransitionTime":"2025-12-08T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.060963 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.061010 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.061021 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.061043 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.061056 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:23Z","lastTransitionTime":"2025-12-08T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.164226 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.164326 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.164350 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.164379 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.164402 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:23Z","lastTransitionTime":"2025-12-08T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.267455 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.267511 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.267530 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.267555 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.267573 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:23Z","lastTransitionTime":"2025-12-08T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.370605 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.370675 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.370691 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.370719 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.370738 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:23Z","lastTransitionTime":"2025-12-08T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.474164 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.474248 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.474273 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.474300 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.474318 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:23Z","lastTransitionTime":"2025-12-08T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.577361 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.577434 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.577459 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.577600 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.577623 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:23Z","lastTransitionTime":"2025-12-08T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.681455 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.681515 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.681532 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.681559 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.681577 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:23Z","lastTransitionTime":"2025-12-08T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.784488 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.784564 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.784583 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.784649 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.784668 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:23Z","lastTransitionTime":"2025-12-08T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.882017 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.882051 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:23 crc kubenswrapper[4745]: E1208 00:08:23.882336 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:23 crc kubenswrapper[4745]: E1208 00:08:23.882481 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.888999 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.889083 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.889101 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.889126 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.889144 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:23Z","lastTransitionTime":"2025-12-08T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.992005 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.992072 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.992096 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.992125 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:23 crc kubenswrapper[4745]: I1208 00:08:23.992148 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:23Z","lastTransitionTime":"2025-12-08T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.095459 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.095524 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.095544 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.095571 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.095589 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:24Z","lastTransitionTime":"2025-12-08T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.199243 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.199318 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.199339 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.199365 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.199386 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:24Z","lastTransitionTime":"2025-12-08T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.302161 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.302222 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.302238 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.302264 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.302280 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:24Z","lastTransitionTime":"2025-12-08T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.405322 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.405404 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.405428 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.405459 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.405485 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:24Z","lastTransitionTime":"2025-12-08T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.509285 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.509367 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.509389 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.509417 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.509442 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:24Z","lastTransitionTime":"2025-12-08T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.612557 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.612625 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.612647 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.612675 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.612698 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:24Z","lastTransitionTime":"2025-12-08T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.719860 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.719987 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.720009 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.720033 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.720054 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:24Z","lastTransitionTime":"2025-12-08T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.822794 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.822850 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.822872 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.822902 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.822920 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:24Z","lastTransitionTime":"2025-12-08T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.881732 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.881739 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:24 crc kubenswrapper[4745]: E1208 00:08:24.881967 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:24 crc kubenswrapper[4745]: E1208 00:08:24.882111 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.908247 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b8
2799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:24Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.928382 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.928471 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.928489 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.928546 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.928567 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:24Z","lastTransitionTime":"2025-12-08T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.930196 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:24Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.948804 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:24Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.975380 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067
af802c6265d1a27464d21933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d8d1b56839543aa33cec68b29b9de99c6827beaddc3a43a64c81574e1b7b3b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:07:57Z\\\",\\\"message\\\":\\\"w:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:dce28c51-c9f1-478b-97c8-7e209d6e7cbe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1208 00:07:57.065397 6218 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:16Z\\\",\\\"message\\\":\\\"ions:[]Condition{},},}\\\\nI1208 00:08:15.920310 6428 services_controller.go:451] Built service openshift-multus/multus-admission-controller cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-multus/multus-admission-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, 
Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1208 00:08:15.920266 6428 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/control-plane-machine-set-operator]} name:Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:08:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"
mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:24Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:24 crc kubenswrapper[4745]: I1208 00:08:24.992303 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:24Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.022972 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:25Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.031362 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.031406 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.031421 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.031442 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.031456 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:25Z","lastTransitionTime":"2025-12-08T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.043884 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:25Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.062301 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:25Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.082265 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:25Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.104704 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:25Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.120544 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:25Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.134590 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.134649 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.134669 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.134693 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.134710 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:25Z","lastTransitionTime":"2025-12-08T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.142826 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:25Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.160714 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e55a1aa-814a-4e17-8259-681a1f80efe3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb198c5bd72a1650c0d7f4740d5e7de4ca13d52239ce5b9faa5ab197fcd581b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6a3e7e941656fd7783871bff0012816c119487be1f52f7120fa55f4db219964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://536fd2adc6545b10655670915582424eb19c0b75d002e574953b3b6db6260bfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:25Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.177230 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:25Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.195833 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:25Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.213505 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:25Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.228716 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:25Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.238581 4745 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.238916 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.239177 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.239400 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.239596 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:25Z","lastTransitionTime":"2025-12-08T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.245652 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2839dc1946fe59785bb5e76e7e20541f12d4bacce96d9a000e076cf721448682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://445090ed105e51161484b25ef741b89f4da3976dcad01dad3e73090581353a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cd
d47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:25Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.341900 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.341998 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.342022 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.342050 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.342074 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:25Z","lastTransitionTime":"2025-12-08T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.445783 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.445850 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.445873 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.445905 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.445960 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:25Z","lastTransitionTime":"2025-12-08T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.549116 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.549196 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.549222 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.549251 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.549271 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:25Z","lastTransitionTime":"2025-12-08T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.652340 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.652387 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.652403 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.652427 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.652447 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:25Z","lastTransitionTime":"2025-12-08T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.756098 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.756586 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.756798 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.757027 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.757220 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:25Z","lastTransitionTime":"2025-12-08T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.860571 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.860646 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.860669 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.860712 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.860734 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:25Z","lastTransitionTime":"2025-12-08T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.882517 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.882521 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:25 crc kubenswrapper[4745]: E1208 00:08:25.882748 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:25 crc kubenswrapper[4745]: E1208 00:08:25.882859 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.964003 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.964050 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.964062 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.964081 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:25 crc kubenswrapper[4745]: I1208 00:08:25.964092 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:25Z","lastTransitionTime":"2025-12-08T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.066657 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.066693 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.066702 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.066716 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.066725 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:26Z","lastTransitionTime":"2025-12-08T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.169479 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.169534 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.169555 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.169585 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.169607 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:26Z","lastTransitionTime":"2025-12-08T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.272519 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.272586 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.272603 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.272630 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.272649 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:26Z","lastTransitionTime":"2025-12-08T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.375052 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.375126 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.375145 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.375173 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.375192 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:26Z","lastTransitionTime":"2025-12-08T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.478385 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.478428 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.478445 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.478465 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.478478 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:26Z","lastTransitionTime":"2025-12-08T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.581088 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.581570 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.581596 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.581632 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.581656 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:26Z","lastTransitionTime":"2025-12-08T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.685079 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.685138 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.685156 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.685180 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.685197 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:26Z","lastTransitionTime":"2025-12-08T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.787752 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.787813 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.787830 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.787858 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.787875 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:26Z","lastTransitionTime":"2025-12-08T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.885101 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:26 crc kubenswrapper[4745]: E1208 00:08:26.885259 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.885572 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:26 crc kubenswrapper[4745]: E1208 00:08:26.885662 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.890505 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.890539 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.890552 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.890568 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.890580 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:26Z","lastTransitionTime":"2025-12-08T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.992686 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.992724 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.992732 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.992745 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:26 crc kubenswrapper[4745]: I1208 00:08:26.992756 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:26Z","lastTransitionTime":"2025-12-08T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.096146 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.096218 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.096243 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.096268 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.096286 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:27Z","lastTransitionTime":"2025-12-08T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.199679 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.199754 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.199779 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.199806 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.199872 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:27Z","lastTransitionTime":"2025-12-08T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.302476 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.302551 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.302586 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.302618 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.302643 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:27Z","lastTransitionTime":"2025-12-08T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.406636 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.406689 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.406733 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.406764 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.406783 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:27Z","lastTransitionTime":"2025-12-08T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.510414 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.510498 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.510515 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.510539 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.510595 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:27Z","lastTransitionTime":"2025-12-08T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.553643 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.553726 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.553748 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.553778 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.553797 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:27Z","lastTransitionTime":"2025-12-08T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:27 crc kubenswrapper[4745]: E1208 00:08:27.574807 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:27Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.579804 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.579865 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.579888 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.579919 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.580008 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:27Z","lastTransitionTime":"2025-12-08T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:27 crc kubenswrapper[4745]: E1208 00:08:27.601147 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:27Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.605735 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.605773 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.605783 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.605826 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.605839 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:27Z","lastTransitionTime":"2025-12-08T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:27 crc kubenswrapper[4745]: E1208 00:08:27.625098 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:27Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.630116 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.630210 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.630228 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.630288 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.630307 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:27Z","lastTransitionTime":"2025-12-08T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:27 crc kubenswrapper[4745]: E1208 00:08:27.651870 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:27Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.657455 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.657503 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.657520 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.657543 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.657563 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:27Z","lastTransitionTime":"2025-12-08T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:27 crc kubenswrapper[4745]: E1208 00:08:27.677663 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:27Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:27 crc kubenswrapper[4745]: E1208 00:08:27.677900 4745 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.679737 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.679785 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.679803 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.679827 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.679845 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:27Z","lastTransitionTime":"2025-12-08T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.782295 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.782363 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.782386 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.782412 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.782434 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:27Z","lastTransitionTime":"2025-12-08T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.882217 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.882269 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:27 crc kubenswrapper[4745]: E1208 00:08:27.882343 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:27 crc kubenswrapper[4745]: E1208 00:08:27.882448 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.884809 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.884883 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.884900 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.884956 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.884975 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:27Z","lastTransitionTime":"2025-12-08T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.987365 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.987509 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.987582 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.987669 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:27 crc kubenswrapper[4745]: I1208 00:08:27.987699 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:27Z","lastTransitionTime":"2025-12-08T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.090698 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.090772 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.090797 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.090826 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.090865 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:28Z","lastTransitionTime":"2025-12-08T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.193878 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.193968 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.193987 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.194007 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.194021 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:28Z","lastTransitionTime":"2025-12-08T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.296159 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.296203 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.296215 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.296232 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.296245 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:28Z","lastTransitionTime":"2025-12-08T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.398540 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.398617 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.398641 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.398670 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.398690 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:28Z","lastTransitionTime":"2025-12-08T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.500984 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.501031 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.501043 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.501059 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.501071 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:28Z","lastTransitionTime":"2025-12-08T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.604015 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.604058 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.604072 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.604089 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.604101 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:28Z","lastTransitionTime":"2025-12-08T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.706725 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.706792 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.706810 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.706835 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.706854 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:28Z","lastTransitionTime":"2025-12-08T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.809590 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.809639 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.809655 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.809677 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.809889 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:28Z","lastTransitionTime":"2025-12-08T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.882605 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.882645 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:28 crc kubenswrapper[4745]: E1208 00:08:28.882851 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:28 crc kubenswrapper[4745]: E1208 00:08:28.882990 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.896206 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.911716 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.911762 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.911772 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.911789 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:28 crc kubenswrapper[4745]: I1208 00:08:28.911801 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:28Z","lastTransitionTime":"2025-12-08T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.013672 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.013705 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.013714 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.013727 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.013737 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:29Z","lastTransitionTime":"2025-12-08T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.115526 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.115559 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.115567 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.115579 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.115587 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:29Z","lastTransitionTime":"2025-12-08T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.217239 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.217300 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.217316 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.217339 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.217358 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:29Z","lastTransitionTime":"2025-12-08T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.319058 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.319106 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.319118 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.319137 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.319149 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:29Z","lastTransitionTime":"2025-12-08T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.420941 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.420975 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.420986 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.421003 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.421014 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:29Z","lastTransitionTime":"2025-12-08T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.523688 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.523723 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.523732 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.523747 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.523756 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:29Z","lastTransitionTime":"2025-12-08T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.625842 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.625891 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.625902 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.625919 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.625949 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:29Z","lastTransitionTime":"2025-12-08T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.728444 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.728474 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.728482 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.728495 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.728503 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:29Z","lastTransitionTime":"2025-12-08T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.831484 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.831536 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.831552 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.831576 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.831593 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:29Z","lastTransitionTime":"2025-12-08T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.882041 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.882070 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:29 crc kubenswrapper[4745]: E1208 00:08:29.882224 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:29 crc kubenswrapper[4745]: E1208 00:08:29.882321 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.933900 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.933955 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.933968 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.933983 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:29 crc kubenswrapper[4745]: I1208 00:08:29.933993 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:29Z","lastTransitionTime":"2025-12-08T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.037543 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.037607 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.037629 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.037652 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.037669 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:30Z","lastTransitionTime":"2025-12-08T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.142566 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.142796 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.142815 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.142842 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.142866 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:30Z","lastTransitionTime":"2025-12-08T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.245833 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.245950 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.245973 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.245997 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.246015 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:30Z","lastTransitionTime":"2025-12-08T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.348678 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.348716 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.348725 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.348740 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.348750 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:30Z","lastTransitionTime":"2025-12-08T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.451469 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.451520 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.451535 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.451555 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.451566 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:30Z","lastTransitionTime":"2025-12-08T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.554443 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.554508 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.554531 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.554561 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.554584 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:30Z","lastTransitionTime":"2025-12-08T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.656521 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.656564 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.656578 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.656596 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.656608 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:30Z","lastTransitionTime":"2025-12-08T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.758808 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.758908 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.758955 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.758980 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.759033 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:30Z","lastTransitionTime":"2025-12-08T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.861703 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.861798 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.861832 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.861864 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.861883 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:30Z","lastTransitionTime":"2025-12-08T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.881648 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.881783 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:30 crc kubenswrapper[4745]: E1208 00:08:30.881904 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:30 crc kubenswrapper[4745]: E1208 00:08:30.882063 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.964626 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.964700 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.964721 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.964744 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:30 crc kubenswrapper[4745]: I1208 00:08:30.964761 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:30Z","lastTransitionTime":"2025-12-08T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.067520 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.067583 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.067600 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.067625 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.067643 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:31Z","lastTransitionTime":"2025-12-08T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.170074 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.170109 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.170117 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.170131 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.170142 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:31Z","lastTransitionTime":"2025-12-08T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.272741 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.272787 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.272800 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.272820 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.272833 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:31Z","lastTransitionTime":"2025-12-08T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.374970 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.375008 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.375019 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.375037 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.375048 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:31Z","lastTransitionTime":"2025-12-08T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.478359 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.478390 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.478398 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.478410 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.478419 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:31Z","lastTransitionTime":"2025-12-08T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.581126 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.581174 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.581186 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.581207 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.581220 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:31Z","lastTransitionTime":"2025-12-08T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.683387 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.683422 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.683433 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.683447 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.683457 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:31Z","lastTransitionTime":"2025-12-08T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.786507 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.786564 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.786582 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.786606 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.786626 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:31Z","lastTransitionTime":"2025-12-08T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.881683 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.881693 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:31 crc kubenswrapper[4745]: E1208 00:08:31.882052 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:31 crc kubenswrapper[4745]: E1208 00:08:31.881852 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.889580 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.889616 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.889627 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.889644 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.889655 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:31Z","lastTransitionTime":"2025-12-08T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.992188 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.992258 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.992282 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.992311 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:31 crc kubenswrapper[4745]: I1208 00:08:31.992329 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:31Z","lastTransitionTime":"2025-12-08T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.094339 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.094380 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.094392 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.094407 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.094415 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:32Z","lastTransitionTime":"2025-12-08T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.196458 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.196509 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.196531 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.196554 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.196569 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:32Z","lastTransitionTime":"2025-12-08T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.298603 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.298635 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.298646 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.298658 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.298666 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:32Z","lastTransitionTime":"2025-12-08T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.324094 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs\") pod \"network-metrics-daemon-zpkz9\" (UID: \"c402d875-2477-4bda-872a-da631b5b5ff7\") " pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:32 crc kubenswrapper[4745]: E1208 00:08:32.324218 4745 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 00:08:32 crc kubenswrapper[4745]: E1208 00:08:32.324278 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs podName:c402d875-2477-4bda-872a-da631b5b5ff7 nodeName:}" failed. No retries permitted until 2025-12-08 00:09:04.324262625 +0000 UTC m=+99.753468925 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs") pod "network-metrics-daemon-zpkz9" (UID: "c402d875-2477-4bda-872a-da631b5b5ff7") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.400669 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.400732 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.400754 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.400781 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.400802 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:32Z","lastTransitionTime":"2025-12-08T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.502945 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.502983 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.502994 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.503010 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.503022 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:32Z","lastTransitionTime":"2025-12-08T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.605216 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.605247 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.605255 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.605268 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.605276 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:32Z","lastTransitionTime":"2025-12-08T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.707556 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.707626 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.707651 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.707679 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.707699 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:32Z","lastTransitionTime":"2025-12-08T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.810054 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.810093 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.810109 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.810129 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.810143 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:32Z","lastTransitionTime":"2025-12-08T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.881871 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.881985 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:32 crc kubenswrapper[4745]: E1208 00:08:32.882537 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:32 crc kubenswrapper[4745]: E1208 00:08:32.882703 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.883455 4745 scope.go:117] "RemoveContainer" containerID="f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933" Dec 08 00:08:32 crc kubenswrapper[4745]: E1208 00:08:32.883726 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.894654 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9d622a6-07af-4f4d-beb1-50bd1b2d7926\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb0ed0a3b1dcb4a78338dfc9a515ca7826b7f99c44f131c843981790fd3de6ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:32Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.911458 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator
@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:32Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.918447 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.918498 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.918510 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.918529 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.918545 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:32Z","lastTransitionTime":"2025-12-08T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.928766 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:32Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.938065 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:32Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.963356 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067
af802c6265d1a27464d21933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:16Z\\\",\\\"message\\\":\\\"ions:[]Condition{},},}\\\\nI1208 00:08:15.920310 6428 services_controller.go:451] Built service openshift-multus/multus-admission-controller cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-multus/multus-admission-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1208 00:08:15.920266 6428 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/control-plane-machine-set-operator]} name:Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:08:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:32Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.973196 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:32Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:32 crc kubenswrapper[4745]: I1208 00:08:32.991310 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:32Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.004154 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.013415 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.021049 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.021069 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.021077 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.021090 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.021100 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:33Z","lastTransitionTime":"2025-12-08T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.027992 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.047186 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.057901 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.075078 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.088755 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e55a1aa-814a-4e17-8259-681a1f80efe3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb198c5bd72a1650c0d7f4740d5e7de4ca13d52239ce5b9faa5ab197fcd581b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6a3e7e941656fd7783871bff0012816c119487be1f52f7120fa55f4db219964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://536fd2adc6545b10655670915582424eb19c0b75d002e574953b3b6db6260bfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.104202 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.123301 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.123550 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.123584 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.123595 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.123610 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.123622 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:33Z","lastTransitionTime":"2025-12-08T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.141569 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.152254 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.166590 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2839dc1946fe59785bb5e76e7e20541f12d4bacce96d9a000e076cf721448682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://445090ed105e51161484b25ef741b89f4da3976dcad01dad3e73090581353a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 
00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.225794 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.225829 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.225839 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.225853 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.225862 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:33Z","lastTransitionTime":"2025-12-08T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.316943 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pk459_73d47ce8-04b5-4dba-aa14-655581a103a8/kube-multus/0.log" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.317003 4745 generic.go:334] "Generic (PLEG): container finished" podID="73d47ce8-04b5-4dba-aa14-655581a103a8" containerID="c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c" exitCode=1 Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.317037 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pk459" event={"ID":"73d47ce8-04b5-4dba-aa14-655581a103a8","Type":"ContainerDied","Data":"c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c"} Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.317420 4745 scope.go:117] "RemoveContainer" containerID="c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.328375 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.328427 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.328442 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.328461 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.328474 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:33Z","lastTransitionTime":"2025-12-08T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.331344 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9d622a6-07af-4f4d-beb1-50bd1b2d7926\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb0ed0a3b1dcb4a78338dfc9a515ca7826b7f99c44f131c843981790fd3de6ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.350644 4745 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.368659 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.383210 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.408057 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers 
with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servic
eaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\
\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:16Z\\\",\\\"message\\\":\\\"ions:[]Condition{},},}\\\\nI1208 00:08:15.920310 6428 services_controller.go:451] Built service openshift-multus/multus-admission-controller cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-multus/multus-admission-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1208 00:08:15.920266 6428 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/control-plane-machine-set-operator]} name:Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:08:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.419679 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.430934 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.430968 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.430982 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.431000 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.431014 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:33Z","lastTransitionTime":"2025-12-08T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.442326 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.455751 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.468193 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.486841 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:33Z\\\",\\\"message\\\":\\\"2025-12-08T00:07:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f72882d3-a727-4139-8160-fe8736cfcb82\\\\n2025-12-08T00:07:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f72882d3-a727-4139-8160-fe8736cfcb82 to /host/opt/cni/bin/\\\\n2025-12-08T00:07:48Z [verbose] multus-daemon 
started\\\\n2025-12-08T00:07:48Z [verbose] Readiness Indicator file check\\\\n2025-12-08T00:08:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.507144 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.518095 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.530941 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.532525 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.532567 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.532580 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.532597 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.532610 4745 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:33Z","lastTransitionTime":"2025-12-08T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.543641 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e55a1aa-814a-4e17-8259-681a1f80efe3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb198c5bd72a1650c0d7f4740d5e7de4ca13d52239ce5b9faa5ab197fcd581b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6a3e7e941656fd7783871bff0012816c119487be1f52f7120fa55f4db219964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://536fd2adc6545b10655670915582424eb19c0b75d002e574953b3b6db6260bfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.555363 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.569460 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.580618 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.591268 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.604804 4745 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2839dc1946fe59785bb5e76e7e20541f12d4bacce96d9a000e076cf721448682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://445090ed105e51161484b25ef741b89f4da3976dcad01dad3e73090581353a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-08T00:08:33Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.635271 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.635296 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.635306 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.635321 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.635331 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:33Z","lastTransitionTime":"2025-12-08T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.737527 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.737563 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.737575 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.737590 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.737600 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:33Z","lastTransitionTime":"2025-12-08T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.839981 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.840014 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.840022 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.840035 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.840044 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:33Z","lastTransitionTime":"2025-12-08T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.881665 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.881734 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:33 crc kubenswrapper[4745]: E1208 00:08:33.881828 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:33 crc kubenswrapper[4745]: E1208 00:08:33.881966 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.942644 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.942686 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.942699 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.942717 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:33 crc kubenswrapper[4745]: I1208 00:08:33.942729 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:33Z","lastTransitionTime":"2025-12-08T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.044978 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.045022 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.045033 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.045052 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.045062 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:34Z","lastTransitionTime":"2025-12-08T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.147629 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.147683 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.147700 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.147734 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.147769 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:34Z","lastTransitionTime":"2025-12-08T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.249871 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.249907 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.249917 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.249948 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.249962 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:34Z","lastTransitionTime":"2025-12-08T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.323620 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pk459_73d47ce8-04b5-4dba-aa14-655581a103a8/kube-multus/0.log" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.323708 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pk459" event={"ID":"73d47ce8-04b5-4dba-aa14-655581a103a8","Type":"ContainerStarted","Data":"aa379390948ecfc2220e8bb11d770d2faf0844a35bbe0684954d611d567a4a88"} Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.344451 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.352434 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.352498 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.352522 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.352550 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:34 crc 
kubenswrapper[4745]: I1208 00:08:34.352573 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:34Z","lastTransitionTime":"2025-12-08T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.375690 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"m
ountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:16Z\\\",\\\"message\\\":\\\"ions:[]Condition{},},}\\\\nI1208 00:08:15.920310 6428 services_controller.go:451] Built service openshift-multus/multus-admission-controller cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-multus/multus-admission-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1208 00:08:15.920266 6428 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service 
k8s.ovn.org/owner:openshift-machine-api/control-plane-machine-set-operator]} name:Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:08:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\
\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.392896 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.409365 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9d622a6-07af-4f4d-beb1-50bd1b2d7926\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb0ed0a3b1dcb4a78338dfc9a515ca7826b7f99c44f131c843981790fd3de6ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.428540 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.450677 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.455326 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.455394 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.455413 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.455438 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.455455 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:34Z","lastTransitionTime":"2025-12-08T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.467712 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa379390948ecfc2220e8bb11d770d2faf0844a35bbe0684954d611d567a4a88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:33Z\\\",\\\"message\\\":\\\"2025-12-08T00:07:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f72882d3-a727-4139-8160-fe8736cfcb82\\\\n2025-12-08T00:07:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f72882d3-a727-4139-8160-fe8736cfcb82 to /host/opt/cni/bin/\\\\n2025-12-08T00:07:48Z [verbose] multus-daemon started\\\\n2025-12-08T00:07:48Z [verbose] Readiness Indicator file 
check\\\\n2025-12-08T00:08:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.485535 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.502156 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.536873 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.550670 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.558091 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.558126 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.558137 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.558153 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.558166 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:34Z","lastTransitionTime":"2025-12-08T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.564474 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.576842 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.593333 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.610089 4745 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4
aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.623310 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e55a1aa-814a-4e17-8259-681a1f80efe3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb198c5bd72a1650c0d7f4740d5e7de4ca13d52239ce5b9faa5ab197fcd581b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6a3e7e941656fd7783871bff0012816c119487be1f52f7120fa55f4db219964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://536fd2adc6545b10655670915582424eb19c0b75d002e574953b3b6db6260bfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.641546 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.656607 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.660494 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.660524 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.660536 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.660551 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.660562 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:34Z","lastTransitionTime":"2025-12-08T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.667565 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2839dc1946fe59785bb5e76e7e20541f12d4bacce96d9a000e076cf721448682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://445090ed105e51161484b25ef741b89f4da3976dcad01dad3e73090581353a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.763139 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.763191 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.763210 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.763234 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.763251 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:34Z","lastTransitionTime":"2025-12-08T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.865999 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.866047 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.866066 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.866090 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.866107 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:34Z","lastTransitionTime":"2025-12-08T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.884162 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.884211 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:34 crc kubenswrapper[4745]: E1208 00:08:34.884297 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:34 crc kubenswrapper[4745]: E1208 00:08:34.884436 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.909192 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.925712 4745 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 
00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.943869 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.959935 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e55a1aa-814a-4e17-8259-681a1f80efe3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb198c5bd72a1650c0d7f4740d5e7de4ca13d52239ce5b9faa5ab197fcd581b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6a3e7e941656fd7783871bff0012816c119487be1f52f7120fa55f4db219964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://536fd2adc6545b10655670915582424eb19c0b75d002e574953b3b6db6260bfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.967685 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.967715 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.967740 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.967756 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.967766 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:34Z","lastTransitionTime":"2025-12-08T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.972310 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.982865 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:34 crc kubenswrapper[4745]: I1208 00:08:34.995149 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2839dc1946fe59785bb5e76e7e20541f12d4bacce96d9a000e076cf721448682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://445090ed105e51161484b25ef741b89f4da3976dcad01dad3e73090581353a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:34Z is after 2025-08-24T17:21:41Z" Dec 08 
00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.008563 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:35Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.027303 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067
af802c6265d1a27464d21933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:16Z\\\",\\\"message\\\":\\\"ions:[]Condition{},},}\\\\nI1208 00:08:15.920310 6428 services_controller.go:451] Built service openshift-multus/multus-admission-controller cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-multus/multus-admission-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1208 00:08:15.920266 6428 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/control-plane-machine-set-operator]} name:Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:08:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:35Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.070132 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:35Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.074077 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.074118 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.074137 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.074163 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.074181 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:35Z","lastTransitionTime":"2025-12-08T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.085822 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9d622a6-07af-4f4d-beb1-50bd1b2d7926\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb0ed0a3b1dcb4a78338dfc9a515ca7826b7f99c44f131c843981790fd3de6ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:35Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.100001 4745 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:35Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.115131 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:35Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.130074 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa379390948ecfc2220e8bb11d770d2faf0844a35bbe0684954d611d567a4a88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:33Z\\\",\\\"message\\\":\\\"2025-12-08T00:07:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f72882d3-a727-4139-8160-fe8736cfcb82\\\\n2025-12-08T00:07:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f72882d3-a727-4139-8160-fe8736cfcb82 to /host/opt/cni/bin/\\\\n2025-12-08T00:07:48Z [verbose] multus-daemon started\\\\n2025-12-08T00:07:48Z [verbose] Readiness Indicator file check\\\\n2025-12-08T00:08:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:35Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.153858 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:35Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.164520 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:35Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.175907 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.175956 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.175967 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.175983 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.175992 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:35Z","lastTransitionTime":"2025-12-08T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.187339 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:35Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.199940 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:35Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.209732 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:35Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.278968 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.279026 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.279043 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.279067 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.279084 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:35Z","lastTransitionTime":"2025-12-08T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.382120 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.382155 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.382164 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.382177 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.382187 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:35Z","lastTransitionTime":"2025-12-08T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.484712 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.484766 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.484783 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.484806 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.484823 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:35Z","lastTransitionTime":"2025-12-08T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.587191 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.587217 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.587225 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.587238 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.587247 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:35Z","lastTransitionTime":"2025-12-08T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.689762 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.689791 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.689801 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.689815 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.689825 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:35Z","lastTransitionTime":"2025-12-08T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.791348 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.791381 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.791389 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.791403 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.791412 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:35Z","lastTransitionTime":"2025-12-08T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.881767 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.881818 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:35 crc kubenswrapper[4745]: E1208 00:08:35.881965 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:35 crc kubenswrapper[4745]: E1208 00:08:35.882094 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.893994 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.894028 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.894041 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.894057 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.894068 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:35Z","lastTransitionTime":"2025-12-08T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.996415 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.996494 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.996511 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.996536 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:35 crc kubenswrapper[4745]: I1208 00:08:35.996555 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:35Z","lastTransitionTime":"2025-12-08T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.099595 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.099659 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.099684 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.099714 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.099739 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:36Z","lastTransitionTime":"2025-12-08T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.201986 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.202053 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.202070 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.202132 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.202154 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:36Z","lastTransitionTime":"2025-12-08T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.305336 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.305374 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.305384 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.305399 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.305411 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:36Z","lastTransitionTime":"2025-12-08T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.408545 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.408612 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.408633 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.408659 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.408678 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:36Z","lastTransitionTime":"2025-12-08T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.511901 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.511984 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.512001 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.512024 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.512041 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:36Z","lastTransitionTime":"2025-12-08T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.614372 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.614431 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.614450 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.614475 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.614492 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:36Z","lastTransitionTime":"2025-12-08T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.716999 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.717054 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.717072 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.717096 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.717114 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:36Z","lastTransitionTime":"2025-12-08T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.819785 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.819868 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.819896 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.819961 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.819987 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:36Z","lastTransitionTime":"2025-12-08T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.882666 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.882799 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:36 crc kubenswrapper[4745]: E1208 00:08:36.883046 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:36 crc kubenswrapper[4745]: E1208 00:08:36.883240 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.922780 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.922841 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.922863 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.922893 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:36 crc kubenswrapper[4745]: I1208 00:08:36.922914 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:36Z","lastTransitionTime":"2025-12-08T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.025230 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.025287 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.025305 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.025330 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.025348 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:37Z","lastTransitionTime":"2025-12-08T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.127829 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.127890 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.127907 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.127956 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.127974 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:37Z","lastTransitionTime":"2025-12-08T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.231277 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.231366 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.231396 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.231426 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.231513 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:37Z","lastTransitionTime":"2025-12-08T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.333292 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.333332 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.333344 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.333363 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.333374 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:37Z","lastTransitionTime":"2025-12-08T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.435639 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.435675 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.435685 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.435699 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.435708 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:37Z","lastTransitionTime":"2025-12-08T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.538327 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.538384 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.538401 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.538424 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.538441 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:37Z","lastTransitionTime":"2025-12-08T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.641793 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.641842 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.641857 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.641879 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.641900 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:37Z","lastTransitionTime":"2025-12-08T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.744985 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.745044 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.745062 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.745085 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.745105 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:37Z","lastTransitionTime":"2025-12-08T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.848293 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.848358 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.848375 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.848402 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.848421 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:37Z","lastTransitionTime":"2025-12-08T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.865915 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.866022 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.866040 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.866066 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.866088 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:37Z","lastTransitionTime":"2025-12-08T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.882195 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:37 crc kubenswrapper[4745]: E1208 00:08:37.882337 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.882304 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:37 crc kubenswrapper[4745]: E1208 00:08:37.882435 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:37 crc kubenswrapper[4745]: E1208 00:08:37.887093 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:37Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.892403 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.892476 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.892505 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.892535 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.892560 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:37Z","lastTransitionTime":"2025-12-08T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:37 crc kubenswrapper[4745]: E1208 00:08:37.911431 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:37Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.917391 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.917447 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.917466 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.917491 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.917509 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:37Z","lastTransitionTime":"2025-12-08T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:37 crc kubenswrapper[4745]: E1208 00:08:37.938032 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:37Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.942809 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.942874 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.942893 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.942918 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.942966 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:37Z","lastTransitionTime":"2025-12-08T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:37 crc kubenswrapper[4745]: E1208 00:08:37.962477 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:37Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.971884 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.972805 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.973079 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.973300 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.973506 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:37Z","lastTransitionTime":"2025-12-08T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:37 crc kubenswrapper[4745]: E1208 00:08:37.992379 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:37Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:37 crc kubenswrapper[4745]: E1208 00:08:37.993113 4745 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.995386 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.995627 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.995825 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.996051 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:37 crc kubenswrapper[4745]: I1208 00:08:37.996244 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:37Z","lastTransitionTime":"2025-12-08T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.098916 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.099002 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.099028 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.099074 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.099094 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:38Z","lastTransitionTime":"2025-12-08T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.201622 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.201706 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.201731 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.201763 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.201788 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:38Z","lastTransitionTime":"2025-12-08T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.304215 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.304279 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.304301 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.304332 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.304355 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:38Z","lastTransitionTime":"2025-12-08T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.407504 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.407846 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.408121 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.408310 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.408436 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:38Z","lastTransitionTime":"2025-12-08T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.511588 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.511681 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.511751 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.511782 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.511857 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:38Z","lastTransitionTime":"2025-12-08T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.614459 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.615357 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.615513 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.615661 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.615792 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:38Z","lastTransitionTime":"2025-12-08T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.719315 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.719443 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.719463 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.719489 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.719508 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:38Z","lastTransitionTime":"2025-12-08T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.823147 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.823206 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.823224 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.823250 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.823270 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:38Z","lastTransitionTime":"2025-12-08T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.881882 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.882058 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:38 crc kubenswrapper[4745]: E1208 00:08:38.882212 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:38 crc kubenswrapper[4745]: E1208 00:08:38.882578 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.926806 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.926898 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.926963 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.926993 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:38 crc kubenswrapper[4745]: I1208 00:08:38.927061 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:38Z","lastTransitionTime":"2025-12-08T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.030074 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.030171 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.030190 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.030572 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.030603 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:39Z","lastTransitionTime":"2025-12-08T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.133343 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.133382 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.133393 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.133409 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.133425 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:39Z","lastTransitionTime":"2025-12-08T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.236788 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.237231 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.237386 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.237539 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.237677 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:39Z","lastTransitionTime":"2025-12-08T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.343416 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.343477 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.343502 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.343531 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.343553 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:39Z","lastTransitionTime":"2025-12-08T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.446814 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.447242 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.447396 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.447583 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.447713 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:39Z","lastTransitionTime":"2025-12-08T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.550696 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.551176 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.551337 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.551499 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.551653 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:39Z","lastTransitionTime":"2025-12-08T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.655012 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.655058 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.655076 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.655099 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.655117 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:39Z","lastTransitionTime":"2025-12-08T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.758064 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.758120 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.758143 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.758173 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.758194 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:39Z","lastTransitionTime":"2025-12-08T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.861118 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.861179 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.861196 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.861224 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.861241 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:39Z","lastTransitionTime":"2025-12-08T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.882325 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.882405 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:39 crc kubenswrapper[4745]: E1208 00:08:39.882495 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:39 crc kubenswrapper[4745]: E1208 00:08:39.882697 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.964668 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.964725 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.964748 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.964777 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:39 crc kubenswrapper[4745]: I1208 00:08:39.964798 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:39Z","lastTransitionTime":"2025-12-08T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.067465 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.067520 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.067538 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.067561 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.067578 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:40Z","lastTransitionTime":"2025-12-08T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.170382 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.170726 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.170891 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.171099 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.171248 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:40Z","lastTransitionTime":"2025-12-08T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.274791 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.274840 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.274857 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.274880 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.274897 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:40Z","lastTransitionTime":"2025-12-08T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.377309 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.377368 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.377385 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.377410 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.377427 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:40Z","lastTransitionTime":"2025-12-08T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.486905 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.487008 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.487027 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.487053 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.487072 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:40Z","lastTransitionTime":"2025-12-08T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.590055 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.590119 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.590134 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.590163 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.590184 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:40Z","lastTransitionTime":"2025-12-08T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.692874 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.692971 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.692997 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.693025 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.693046 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:40Z","lastTransitionTime":"2025-12-08T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.796524 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.796594 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.796615 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.796642 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.796659 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:40Z","lastTransitionTime":"2025-12-08T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.882534 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.882660 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:40 crc kubenswrapper[4745]: E1208 00:08:40.882717 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:40 crc kubenswrapper[4745]: E1208 00:08:40.882906 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.899894 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.899991 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.900018 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.900049 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:40 crc kubenswrapper[4745]: I1208 00:08:40.900076 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:40Z","lastTransitionTime":"2025-12-08T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.002913 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.003018 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.003036 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.003064 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.003083 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:41Z","lastTransitionTime":"2025-12-08T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.106135 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.106199 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.106215 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.106240 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.106260 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:41Z","lastTransitionTime":"2025-12-08T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.209597 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.209650 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.209667 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.209691 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.209708 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:41Z","lastTransitionTime":"2025-12-08T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.312220 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.312294 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.312317 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.312342 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.312360 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:41Z","lastTransitionTime":"2025-12-08T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.414752 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.414962 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.414984 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.415012 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.415038 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:41Z","lastTransitionTime":"2025-12-08T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.517597 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.517664 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.517683 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.517709 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.517727 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:41Z","lastTransitionTime":"2025-12-08T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.621621 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.621676 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.621693 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.621721 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.621742 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:41Z","lastTransitionTime":"2025-12-08T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.724977 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.725014 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.725025 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.725044 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.725057 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:41Z","lastTransitionTime":"2025-12-08T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.827293 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.827334 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.827343 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.827357 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.827366 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:41Z","lastTransitionTime":"2025-12-08T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.881796 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.881802 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:41 crc kubenswrapper[4745]: E1208 00:08:41.882022 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:41 crc kubenswrapper[4745]: E1208 00:08:41.882147 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.930376 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.930442 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.930464 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.930494 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:41 crc kubenswrapper[4745]: I1208 00:08:41.930515 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:41Z","lastTransitionTime":"2025-12-08T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.034162 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.034223 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.034240 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.034265 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.034283 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:42Z","lastTransitionTime":"2025-12-08T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.137891 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.138024 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.138044 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.138071 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.138089 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:42Z","lastTransitionTime":"2025-12-08T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.241614 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.241662 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.241678 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.241699 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.241714 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:42Z","lastTransitionTime":"2025-12-08T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.344689 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.344751 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.344774 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.344802 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.344821 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:42Z","lastTransitionTime":"2025-12-08T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.447147 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.447194 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.447212 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.447238 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.447254 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:42Z","lastTransitionTime":"2025-12-08T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.550235 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.550332 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.550357 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.550396 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.550417 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:42Z","lastTransitionTime":"2025-12-08T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.652979 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.653042 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.653062 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.653518 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.653574 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:42Z","lastTransitionTime":"2025-12-08T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.756192 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.756270 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.756297 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.756325 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.756346 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:42Z","lastTransitionTime":"2025-12-08T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.859317 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.859410 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.859429 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.859452 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.859472 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:42Z","lastTransitionTime":"2025-12-08T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.881872 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.881915 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:42 crc kubenswrapper[4745]: E1208 00:08:42.882077 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:42 crc kubenswrapper[4745]: E1208 00:08:42.882224 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.962124 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.962224 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.962253 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.962285 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:42 crc kubenswrapper[4745]: I1208 00:08:42.962308 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:42Z","lastTransitionTime":"2025-12-08T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.065751 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.065808 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.065857 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.065882 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.065902 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:43Z","lastTransitionTime":"2025-12-08T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.168594 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.168658 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.168674 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.168699 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.168716 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:43Z","lastTransitionTime":"2025-12-08T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.271185 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.271248 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.271265 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.271290 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.271308 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:43Z","lastTransitionTime":"2025-12-08T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.374132 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.374219 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.374234 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.374259 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.374277 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:43Z","lastTransitionTime":"2025-12-08T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.477979 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.478047 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.478065 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.478093 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.478110 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:43Z","lastTransitionTime":"2025-12-08T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.581966 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.582053 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.582070 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.582096 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.582113 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:43Z","lastTransitionTime":"2025-12-08T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.685670 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.685768 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.685786 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.685811 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.685827 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:43Z","lastTransitionTime":"2025-12-08T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.790762 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.790840 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.790864 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.790890 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.790907 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:43Z","lastTransitionTime":"2025-12-08T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.882011 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.882031 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:43 crc kubenswrapper[4745]: E1208 00:08:43.882189 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:43 crc kubenswrapper[4745]: E1208 00:08:43.882373 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.894867 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.894969 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.894990 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.895016 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.895037 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:43Z","lastTransitionTime":"2025-12-08T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.997314 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.997374 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.997393 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.997420 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:43 crc kubenswrapper[4745]: I1208 00:08:43.997438 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:43Z","lastTransitionTime":"2025-12-08T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.100426 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.100820 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.100838 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.100863 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.100882 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:44Z","lastTransitionTime":"2025-12-08T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.202957 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.203016 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.203033 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.203052 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.203064 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:44Z","lastTransitionTime":"2025-12-08T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.306643 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.306696 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.306713 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.306736 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.306752 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:44Z","lastTransitionTime":"2025-12-08T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.410278 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.410333 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.410359 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.410392 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.410416 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:44Z","lastTransitionTime":"2025-12-08T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.514131 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.514186 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.514205 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.514231 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.514256 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:44Z","lastTransitionTime":"2025-12-08T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.617035 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.617084 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.617096 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.617113 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.617124 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:44Z","lastTransitionTime":"2025-12-08T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.720906 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.721036 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.721050 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.721069 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.721084 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:44Z","lastTransitionTime":"2025-12-08T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.823478 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.823547 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.823565 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.823590 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.823611 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:44Z","lastTransitionTime":"2025-12-08T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.882002 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:44 crc kubenswrapper[4745]: E1208 00:08:44.882235 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.882313 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:44 crc kubenswrapper[4745]: E1208 00:08:44.883182 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.883612 4745 scope.go:117] "RemoveContainer" containerID="f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.897700 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:44Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.915251 4745 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2839dc1946fe59785bb5e76e7e20541f12d4bacce96d9a000e076cf721448682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://445090ed105e51161484b25ef741b89f4da3976dcad01dad3e73090581353a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-08T00:08:44Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.925722 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.925755 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.925766 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.925795 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.925807 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:44Z","lastTransitionTime":"2025-12-08T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.931320 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:44Z is after 
2025-08-24T17:21:41Z" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.953738 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c
b236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:16Z\\\",\\\"message\\\":\\\"ions:[]Condition{},},}\\\\nI1208 00:08:15.920310 6428 services_controller.go:451] Built service openshift-multus/multus-admission-controller cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-multus/multus-admission-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1208 00:08:15.920266 6428 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/control-plane-machine-set-operator]} name:Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none 
reject\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:08:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:44Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.970650 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:44Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.984015 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9d622a6-07af-4f4d-beb1-50bd1b2d7926\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb0ed0a3b1dcb4a78338dfc9a515ca7826b7f99c44f131c843981790fd3de6ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:44Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:44 crc kubenswrapper[4745]: I1208 00:08:44.996660 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:44Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.008361 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.018589 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa379390948ecfc2220e8bb11d770d2faf0844a35bbe0684954d611d567a4a88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:33Z\\\",\\\"message\\\":\\\"2025-12-08T00:07:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f72882d3-a727-4139-8160-fe8736cfcb82\\\\n2025-12-08T00:07:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f72882d3-a727-4139-8160-fe8736cfcb82 to /host/opt/cni/bin/\\\\n2025-12-08T00:07:48Z [verbose] multus-daemon started\\\\n2025-12-08T00:07:48Z [verbose] Readiness Indicator file check\\\\n2025-12-08T00:08:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.028576 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.028617 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.028629 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.028645 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.028657 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:45Z","lastTransitionTime":"2025-12-08T00:08:45Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.030825 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-08T00:08:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.040127 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.060079 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.074847 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.090282 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.107938 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.119540 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.130721 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.130747 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.130756 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.130768 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.130786 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:45Z","lastTransitionTime":"2025-12-08T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.136394 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.150527 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e55a1aa-814a-4e17-8259-681a1f80efe3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb198c5bd72a1650c0d7f4740d5e7de4ca13d52239ce5b9faa5ab197fcd581b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6a3e7e941656fd7783871bff0012816c119487be1f52f7120fa55f4db219964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://536fd2adc6545b10655670915582424eb19c0b75d002e574953b3b6db6260bfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.165877 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:45Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.233361 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.233417 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.233435 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.233459 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.233476 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:45Z","lastTransitionTime":"2025-12-08T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.336533 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.336610 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.336629 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.336654 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.336673 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:45Z","lastTransitionTime":"2025-12-08T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.440102 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.440179 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.440207 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.440238 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.440261 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:45Z","lastTransitionTime":"2025-12-08T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.543462 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.543532 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.543553 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.543583 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.543606 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:45Z","lastTransitionTime":"2025-12-08T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.646586 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.646637 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.646654 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.646677 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.646694 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:45Z","lastTransitionTime":"2025-12-08T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.749695 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.749741 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.749753 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.749769 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.749781 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:45Z","lastTransitionTime":"2025-12-08T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.852378 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.852427 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.852440 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.852459 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.852473 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:45Z","lastTransitionTime":"2025-12-08T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.882183 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.882244 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:45 crc kubenswrapper[4745]: E1208 00:08:45.882392 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:45 crc kubenswrapper[4745]: E1208 00:08:45.882525 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.956777 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.956820 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.956833 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.956849 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:45 crc kubenswrapper[4745]: I1208 00:08:45.956861 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:45Z","lastTransitionTime":"2025-12-08T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.060051 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.060097 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.060111 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.060132 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.060148 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:46Z","lastTransitionTime":"2025-12-08T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.162193 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.162272 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.162305 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.162335 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.162355 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:46Z","lastTransitionTime":"2025-12-08T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.265481 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.265566 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.265590 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.265617 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.265638 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:46Z","lastTransitionTime":"2025-12-08T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.367417 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.367781 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.367963 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.368098 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.368231 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:46Z","lastTransitionTime":"2025-12-08T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.372909 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovnkube-controller/2.log" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.376972 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerStarted","Data":"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104"} Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.379626 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.392571 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.408574 4745 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2839dc1946fe59785bb5e76e7e20541f12d4bacce96d9a000e076cf721448682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://445090ed105e51161484b25ef741b89f4da3976dcad01dad3e73090581353a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.424101 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.436189 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.456538 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ec3ee581143162fdd498774ec7eeaf08972a0be
93839146589def3837389104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:16Z\\\",\\\"message\\\":\\\"ions:[]Condition{},},}\\\\nI1208 00:08:15.920310 6428 services_controller.go:451] Built service openshift-multus/multus-admission-controller cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-multus/multus-admission-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1208 00:08:15.920266 6428 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/control-plane-machine-set-operator]} name:Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none 
reject\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:08:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.471556 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.471624 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.471649 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.471680 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.471702 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:46Z","lastTransitionTime":"2025-12-08T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.471795 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.488590 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9d622a6-07af-4f4d-beb1-50bd1b2d7926\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb0ed0a3b1dcb4a78338dfc9a515ca7826b7f99c44f131c843981790fd3de6ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.509191 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.525977 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.542981 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa379390948ecfc2220e8bb11d770d2faf0844a35bbe0684954d611d567a4a88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:33Z\\\",\\\"message\\\":\\\"2025-12-08T00:07:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f72882d3-a727-4139-8160-fe8736cfcb82\\\\n2025-12-08T00:07:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f72882d3-a727-4139-8160-fe8736cfcb82 to /host/opt/cni/bin/\\\\n2025-12-08T00:07:48Z [verbose] multus-daemon started\\\\n2025-12-08T00:07:48Z [verbose] Readiness Indicator file check\\\\n2025-12-08T00:08:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.557434 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.567999 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.573778 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.573811 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.573822 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.573838 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.573849 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:46Z","lastTransitionTime":"2025-12-08T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.606736 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.629310 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.648238 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.669286 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.676254 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.676550 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.676713 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.676862 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.677052 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:46Z","lastTransitionTime":"2025-12-08T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.688584 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.708620 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.727112 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e55a1aa-814a-4e17-8259-681a1f80efe3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb198c5bd72a1650c0d7f4740d5e7de4ca13d52239ce5b9faa5ab197fcd581b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6a3e7e941656fd7783871bff0012816c119487be1f52f7120fa55f4db219964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://536fd2adc6545b10655670915582424eb19c0b75d002e574953b3b6db6260bfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:46Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.779626 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.779691 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.779710 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.779735 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.779753 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:46Z","lastTransitionTime":"2025-12-08T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.882176 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.882506 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.882649 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:46 crc kubenswrapper[4745]: E1208 00:08:46.882665 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.882693 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.882711 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.882737 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.882755 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:46Z","lastTransitionTime":"2025-12-08T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:46 crc kubenswrapper[4745]: E1208 00:08:46.882814 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.986257 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.986340 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.986359 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.986382 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:46 crc kubenswrapper[4745]: I1208 00:08:46.986399 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:46Z","lastTransitionTime":"2025-12-08T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.089297 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.089348 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.089364 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.089388 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.089404 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:47Z","lastTransitionTime":"2025-12-08T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.192344 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.192403 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.192421 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.192446 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.192464 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:47Z","lastTransitionTime":"2025-12-08T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.295858 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.295920 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.295966 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.295989 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.296006 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:47Z","lastTransitionTime":"2025-12-08T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.382672 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovnkube-controller/3.log" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.383837 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovnkube-controller/2.log" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.387612 4745 generic.go:334] "Generic (PLEG): container finished" podID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerID="1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104" exitCode=1 Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.387689 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerDied","Data":"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104"} Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.387749 4745 scope.go:117] "RemoveContainer" containerID="f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.388745 4745 scope.go:117] "RemoveContainer" containerID="1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104" Dec 08 00:08:47 crc kubenswrapper[4745]: E1208 00:08:47.389047 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.399301 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.399550 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.399731 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.399912 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.400185 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:47Z","lastTransitionTime":"2025-12-08T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.410426 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.430257 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2839dc1946fe59785bb5e76e7e20541f12d4bacce96d9a000e076cf721448682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://445090ed105e51161484b25ef741b89f4da3976dcad01dad3e73090581353a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 
00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.452729 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.472514 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.503517 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.503318 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ec3ee581143162fdd498774ec7eeaf08972a0be
93839146589def3837389104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f574b37a522cbef3c689728f0501e01233d12067af802c6265d1a27464d21933\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:16Z\\\",\\\"message\\\":\\\"ions:[]Condition{},},}\\\\nI1208 00:08:15.920310 6428 services_controller.go:451] Built service openshift-multus/multus-admission-controller cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-multus/multus-admission-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-multus/multus-admission-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.119\\\\\\\", Port:8443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1208 00:08:15.920266 6428 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/control-plane-machine-set-operator]} name:Service_openshift-machine-api/control-plane-machine-set-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:08:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:46Z\\\",\\\"message\\\":\\\"tor.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 00:08:46.582967 6812 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 00:08:46.583137 6812 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 00:08:46.580013 6812 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1208 00:08:46.583573 6812 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1208 00:08:46.583591 6812 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1208 00:08:46.583641 6812 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1208 00:08:46.583653 6812 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1208 00:08:46.583674 6812 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1208 00:08:46.583682 6812 
handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1208 00:08:46.583696 6812 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1208 00:08:46.583768 6812 factory.go:656] Stopping watch factory\\\\nI1208 00:08:46.583800 6812 ovnkube.go:599] Stopped ovnkube\\\\nI1208 00:08:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:08:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"host
IP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.503577 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.503828 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.503866 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.503887 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:47Z","lastTransitionTime":"2025-12-08T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.520414 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.536361 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9d622a6-07af-4f4d-beb1-50bd1b2d7926\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb0ed0a3b1dcb4a78338dfc9a515ca7826b7f99c44f131c843981790fd3de6ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.553924 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.573796 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.600687 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa379390948ecfc2220e8bb11d770d2faf0844a35bbe0684954d611d567a4a88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:33Z\\\",\\\"message\\\":\\\"2025-12-08T00:07:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f72882d3-a727-4139-8160-fe8736cfcb82\\\\n2025-12-08T00:07:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f72882d3-a727-4139-8160-fe8736cfcb82 to /host/opt/cni/bin/\\\\n2025-12-08T00:07:48Z [verbose] multus-daemon started\\\\n2025-12-08T00:07:48Z [verbose] Readiness Indicator file check\\\\n2025-12-08T00:08:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.609494 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.609760 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.609895 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.610051 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.610187 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:47Z","lastTransitionTime":"2025-12-08T00:08:47Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.623005 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.639283 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.672485 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.692642 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.714307 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.715523 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.715836 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.716106 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.716148 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:47Z","lastTransitionTime":"2025-12-08T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.715130 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.733088 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.749865 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.770808 4745 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4
aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.783292 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.783406 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:47 crc kubenswrapper[4745]: E1208 00:08:47.783507 4745 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 00:08:47 crc kubenswrapper[4745]: E1208 00:08:47.783605 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:51.783562371 +0000 UTC m=+147.212768701 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:08:47 crc kubenswrapper[4745]: E1208 00:08:47.783669 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 00:09:51.783640523 +0000 UTC m=+147.212846863 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.783738 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:47 crc kubenswrapper[4745]: E1208 00:08:47.783834 4745 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 00:08:47 crc kubenswrapper[4745]: E1208 00:08:47.783896 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-08 00:09:51.7838792 +0000 UTC m=+147.213085510 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.788322 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e55a1aa-814a-4e17-8259-681a1f80efe3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb198c5bd72a1650c0d7f4740d5e7de4ca13d52239ce5b9faa5ab197fcd581b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6a3e7e941656fd7783871bff0012816c119487be1f52f7120fa55f4db219964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://536fd2adc6545b10655670915582424eb19c0b75d002e574953b3b6db6260bfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":tr
ue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:47Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.818609 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.818706 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.818724 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.818748 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.818768 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:47Z","lastTransitionTime":"2025-12-08T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.882359 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.882393 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:47 crc kubenswrapper[4745]: E1208 00:08:47.882572 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:47 crc kubenswrapper[4745]: E1208 00:08:47.882700 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.884991 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:47 crc kubenswrapper[4745]: E1208 00:08:47.885204 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 00:08:47 crc kubenswrapper[4745]: E1208 00:08:47.885227 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 00:08:47 crc kubenswrapper[4745]: E1208 00:08:47.885241 4745 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:08:47 crc kubenswrapper[4745]: E1208 00:08:47.885296 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-08 00:09:51.88528012 +0000 UTC m=+147.314486430 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.922095 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.922150 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.922168 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.922192 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.922212 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:47Z","lastTransitionTime":"2025-12-08T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:47 crc kubenswrapper[4745]: I1208 00:08:47.985642 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:47 crc kubenswrapper[4745]: E1208 00:08:47.985992 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 08 00:08:47 crc kubenswrapper[4745]: E1208 00:08:47.986052 4745 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 08 00:08:47 crc kubenswrapper[4745]: E1208 00:08:47.986075 4745 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:08:47 crc kubenswrapper[4745]: E1208 00:08:47.986171 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-08 00:09:51.986145646 +0000 UTC m=+147.415351976 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.013911 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.014006 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.014023 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.014047 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.014066 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:48Z","lastTransitionTime":"2025-12-08T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:48 crc kubenswrapper[4745]: E1208 00:08:48.035002 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.040191 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.040388 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.040515 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.040659 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.040791 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:48Z","lastTransitionTime":"2025-12-08T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:48 crc kubenswrapper[4745]: E1208 00:08:48.060633 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.066269 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.066329 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.066352 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.066380 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.066399 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:48Z","lastTransitionTime":"2025-12-08T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:48 crc kubenswrapper[4745]: E1208 00:08:48.088534 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.093876 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.093980 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.094011 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.094039 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.094057 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:48Z","lastTransitionTime":"2025-12-08T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:48 crc kubenswrapper[4745]: E1208 00:08:48.116394 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.121914 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.122011 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.122031 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.122061 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.122082 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:48Z","lastTransitionTime":"2025-12-08T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:48 crc kubenswrapper[4745]: E1208 00:08:48.143746 4745 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"14c0f51d-529f-4632-8014-1290968372b9\\\",\\\"systemUUID\\\":\\\"9432629a-1315-4cc4-898c-8395e23ff1ce\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: E1208 00:08:48.144005 4745 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.146327 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.146393 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.146411 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.146435 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.146454 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:48Z","lastTransitionTime":"2025-12-08T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.250104 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.250165 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.250181 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.250207 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.250226 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:48Z","lastTransitionTime":"2025-12-08T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.353129 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.353211 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.353232 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.353258 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.353277 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:48Z","lastTransitionTime":"2025-12-08T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.394551 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovnkube-controller/3.log" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.400572 4745 scope.go:117] "RemoveContainer" containerID="1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104" Dec 08 00:08:48 crc kubenswrapper[4745]: E1208 00:08:48.400859 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.420735 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fb907e19295e08184815d7af2fd6bf65fc030878ff85bd381bef081de93c70a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://354bd2ee4ae697bdb8ae88e83e43017d58dae21277f0dda97cb238b33b6a0cbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identi
ty-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.438024 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44f083ce-ad64-45d5-971c-eca93c5bddd6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f5a58ac6c4fab76af3322136007e003d8c5ecaef3f0c9f7375c1ae216727007d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\
"kube-api-access-7js4h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6czdv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.468548 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.468624 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.468645 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.468673 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.468695 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:48Z","lastTransitionTime":"2025-12-08T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.492172 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f085d3aa-7b24-4491-9503-81796e0b68d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1208 00:07:37.528354 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1208 00:07:37.529383 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-692299043/tls.crt::/tmp/serving-cert-692299043/tls.key\\\\\\\"\\\\nI1208 00:07:43.249869 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1208 00:07:43.254316 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1208 00:07:43.254351 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1208 00:07:43.254413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1208 00:07:43.254426 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1208 00:07:43.263500 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1208 00:07:43.263528 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1208 00:07:43.263558 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263613 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1208 00:07:43.263625 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1208 00:07:43.263637 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1208 00:07:43.263647 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1208 00:07:43.263656 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1208 00:07:43.266275 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.517312 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e55a1aa-814a-4e17-8259-681a1f80efe3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb198c5bd72a1650c0d7f4740d5e7de4ca13d52239ce5b9faa5ab197fcd581b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6a3e7e941656fd7783871bff0012816c119487be1f52f7120fa55f4db219964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://536fd2adc6545b10655670915582424eb19c0b75d002e574953b3b6db6260bfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b5d335a1dc80bf5b0d2e6d69dccc1de1351bd5b7c1e514fc9d81e97525cf933\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.531372 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.541546 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fsd6v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aa20835-0c9d-4fc2-865f-0ec5b1633d5c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b8cac7cdf9bf1fcc902313996ef313a710fe6b5ea6cb353e0b6ed98513244fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vxxkb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fsd6v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.554361 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"494c0a99-4094-400f-a072-51183fae347d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2839dc1946fe59785bb5e76e7e20541f12d4bacce96d9a000e076cf721448682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://445090ed105e51161484b25ef741b89f4da3976dcad01dad3e73090581353a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvntr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lsqkx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.566139 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a8f6767918076e20668758f38fae60cbd0e1c10c1af6e9e7b771f24e7f6f5de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.572444 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.572559 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.572592 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.572626 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.572667 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:48Z","lastTransitionTime":"2025-12-08T00:08:48Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.592235 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/k
ubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\\\",\\\"i
mage\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:46Z\\\",\\\"message\\\":\\\"tor.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 00:08:46.582967 6812 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1208 00:08:46.583137 6812 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1208 00:08:46.580013 6812 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1208 00:08:46.583573 6812 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1208 00:08:46.583591 6812 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1208 00:08:46.583641 6812 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1208 00:08:46.583653 6812 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1208 00:08:46.583674 6812 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1208 00:08:46.583682 6812 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1208 00:08:46.583696 6812 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1208 00:08:46.583768 6812 factory.go:656] Stopping watch factory\\\\nI1208 00:08:46.583800 6812 ovnkube.go:599] Stopped ovnkube\\\\nI1208 
00:08:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:08:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xb9zl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5c9xn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.607524 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c402d875-2477-4bda-872a-da631b5b5ff7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:00Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g8g2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:08:00Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-zpkz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.616988 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b9d622a6-07af-4f4d-beb1-50bd1b2d7926\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fb0ed0a3b1dcb4a78338dfc9a515ca7826b7f99c44f131c843981790fd3de6ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82d1168af98d9cab9d2cbe0eca939413015647d8366aebd2625ef37e0fa9c422\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.630146 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eeae1346-0f75-493c-be32-a3bf69db720f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d38f359fa7b2377fc4647c5b214bcec89662be009bd0f1074a1fb1fd1491a31a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://823058b4f9689c74526406ef6f5ef387191f2be996f935c1b9da2c1ae27b0113\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8526e1338ba0da672327e53fe156014807c9d2174baa7656944005b7a24ab189\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.644524 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c9d7f3dff2eb3f11c5a057bb39ad4ab1d491b07ff4087861a32561ff362469f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.663347 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pk459" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73d47ce8-04b5-4dba-aa14-655581a103a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:08:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa379390948ecfc2220e8bb11d770d2faf0844a35bbe0684954d611d567a4a88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-08T00:08:33Z\\\",\\\"message\\\":\\\"2025-12-08T00:07:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_f72882d3-a727-4139-8160-fe8736cfcb82\\\\n2025-12-08T00:07:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_f72882d3-a727-4139-8160-fe8736cfcb82 to /host/opt/cni/bin/\\\\n2025-12-08T00:07:48Z [verbose] multus-daemon started\\\\n2025-12-08T00:07:48Z [verbose] Readiness Indicator file check\\\\n2025-12-08T00:08:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:08:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7q442\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pk459\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.674808 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.674882 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.674908 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.674974 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.674998 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:48Z","lastTransitionTime":"2025-12-08T00:08:48Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.683551 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84d30d10-c052-4bf5-85d9-a2d13fff0750\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f63c7a6fa3250a39532c181c6d6fb867f5e4e86b85b23173aac4397093c414bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9cc7f3fffec54b895bf3e6cae022ad3cdfe7cedf19ab85d886e207646ca90eef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71f1c5fe64acb2d756b50e39546215d725de08c3feca812aa079a69729ded667\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1df66526f667e63fca7121cbe2051b602860128e2a41b6b912f27441f0c8a83\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://120a4c69d834c3f1a1433696ea15daad17587f131a21bb141fbcc2d376e092a8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:50Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3004704f09bc141609edc75b23c0923c10bd11acb64f22e0f7622f1d72dae752\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://987e29578eee6148e4cefec811b6bfc910d38f946aff016fc72875015527b388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6lg7n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:45Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wdjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.697422 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-q7cpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3283a9b2-6c40-47e3-a219-3e203a77ad0b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fa009c56f4a6cd38e6d70850c08313fa47847dab51224c89f8e280babed6e634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltwtx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:48Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-q7cpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.730564 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58a6fa96-0a6c-47ee-8ffa-05da76242052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a13e0a538970bbe1159d117050641a027e3be607dd1a2a50bbcf4064675bcb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f476ab5a1dda45d1efce4757745cf6472475a2a72523db31a381951c8d264f0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a03769e178a2e60690e55039e26449e781d1531ce0d53b322ca3cbec38333f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d41e18a1ea3fc3c9f2e430f4e83e64093c7d2dc
11e4e26e0f50104ecf6d7b509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d729e2bac893d6e9752984a9065385fafcba98185980ef776ae85b1d0854b31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-08T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fee6a7b33de4af4c6845acf959c9acd01a9d741cd584abde072e6fb078a275a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://243102be5c76d2c404e2ad69c483d6f6db405217b6ae67ed8c2e99d6eb3815a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5bd9df661ec2ea7bd1160a3b7a55b82e716984ae24822b5285dccaf2c114c19d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-08T00:07:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-08T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-08T00:07:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.749794 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.771581 4745 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-08T00:07:43Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-08T00:08:48Z is after 2025-08-24T17:21:41Z" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.777671 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.777865 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.778050 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.778190 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.778311 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:48Z","lastTransitionTime":"2025-12-08T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.881195 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.881251 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.881268 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.881292 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.881309 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:48Z","lastTransitionTime":"2025-12-08T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.881996 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.882308 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:48 crc kubenswrapper[4745]: E1208 00:08:48.882482 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:48 crc kubenswrapper[4745]: E1208 00:08:48.883258 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.984292 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.984342 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.984360 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.984383 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:48 crc kubenswrapper[4745]: I1208 00:08:48.984401 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:48Z","lastTransitionTime":"2025-12-08T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.087542 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.087621 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.087645 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.087673 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.087689 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:49Z","lastTransitionTime":"2025-12-08T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.190009 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.190119 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.190140 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.190161 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.190174 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:49Z","lastTransitionTime":"2025-12-08T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.293730 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.293805 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.293830 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.293861 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.293884 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:49Z","lastTransitionTime":"2025-12-08T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.397274 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.397348 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.397365 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.397388 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.397405 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:49Z","lastTransitionTime":"2025-12-08T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.500780 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.500841 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.500857 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.500882 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.500899 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:49Z","lastTransitionTime":"2025-12-08T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.603543 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.603609 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.603628 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.603652 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.603670 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:49Z","lastTransitionTime":"2025-12-08T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.706766 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.706860 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.706895 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.706966 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.706990 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:49Z","lastTransitionTime":"2025-12-08T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.810789 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.810884 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.810903 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.810953 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.810980 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:49Z","lastTransitionTime":"2025-12-08T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.882424 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.882647 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:49 crc kubenswrapper[4745]: E1208 00:08:49.882853 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:49 crc kubenswrapper[4745]: E1208 00:08:49.883087 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.914481 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.914544 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.914569 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.914599 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:49 crc kubenswrapper[4745]: I1208 00:08:49.914620 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:49Z","lastTransitionTime":"2025-12-08T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.017956 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.018014 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.018031 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.018054 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.018073 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:50Z","lastTransitionTime":"2025-12-08T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.121378 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.121460 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.121487 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.121517 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.121536 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:50Z","lastTransitionTime":"2025-12-08T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.225375 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.225432 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.225453 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.225482 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.225502 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:50Z","lastTransitionTime":"2025-12-08T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.328653 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.328750 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.328799 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.328832 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.328851 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:50Z","lastTransitionTime":"2025-12-08T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.434336 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.434393 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.434409 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.434435 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.434451 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:50Z","lastTransitionTime":"2025-12-08T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.537997 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.538070 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.538106 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.538138 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.538159 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:50Z","lastTransitionTime":"2025-12-08T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.641527 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.641604 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.641627 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.641658 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.641679 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:50Z","lastTransitionTime":"2025-12-08T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.744137 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.744202 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.744217 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.744242 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.744257 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:50Z","lastTransitionTime":"2025-12-08T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.848831 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.848904 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.848917 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.848955 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.848967 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:50Z","lastTransitionTime":"2025-12-08T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.882485 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.882626 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:50 crc kubenswrapper[4745]: E1208 00:08:50.882694 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:50 crc kubenswrapper[4745]: E1208 00:08:50.882959 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.955508 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.955569 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.955587 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.955612 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:50 crc kubenswrapper[4745]: I1208 00:08:50.955629 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:50Z","lastTransitionTime":"2025-12-08T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.058877 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.059023 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.059042 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.059067 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.059084 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:51Z","lastTransitionTime":"2025-12-08T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.162541 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.162596 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.162615 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.162638 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.162653 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:51Z","lastTransitionTime":"2025-12-08T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.264894 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.264945 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.264953 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.264971 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.264987 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:51Z","lastTransitionTime":"2025-12-08T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.367439 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.367477 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.367486 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.367499 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.367507 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:51Z","lastTransitionTime":"2025-12-08T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.470164 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.470204 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.470217 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.470235 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.470252 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:51Z","lastTransitionTime":"2025-12-08T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.573390 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.573447 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.573471 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.573496 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.573511 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:51Z","lastTransitionTime":"2025-12-08T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.676252 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.676302 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.676316 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.676333 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.676345 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:51Z","lastTransitionTime":"2025-12-08T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.779535 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.779589 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.779607 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.779629 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.779646 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:51Z","lastTransitionTime":"2025-12-08T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.881694 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.881694 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:51 crc kubenswrapper[4745]: E1208 00:08:51.882115 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:51 crc kubenswrapper[4745]: E1208 00:08:51.882334 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.882695 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.883465 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.883797 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.883972 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.884144 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:51Z","lastTransitionTime":"2025-12-08T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.987532 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.987879 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.988079 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.988232 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:51 crc kubenswrapper[4745]: I1208 00:08:51.988367 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:51Z","lastTransitionTime":"2025-12-08T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.091741 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.091782 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.091791 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.091806 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.091818 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:52Z","lastTransitionTime":"2025-12-08T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.198095 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.198239 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.198264 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.198303 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.198323 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:52Z","lastTransitionTime":"2025-12-08T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.301388 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.301480 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.301502 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.301530 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.301549 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:52Z","lastTransitionTime":"2025-12-08T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.404846 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.404904 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.404954 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.404983 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.405005 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:52Z","lastTransitionTime":"2025-12-08T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.508029 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.508437 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.508592 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.508744 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.508885 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:52Z","lastTransitionTime":"2025-12-08T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.612209 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.612276 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.612296 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.612322 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.612343 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:52Z","lastTransitionTime":"2025-12-08T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.715858 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.715957 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.715981 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.716015 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.716038 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:52Z","lastTransitionTime":"2025-12-08T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.819603 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.819662 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.819679 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.819703 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.819721 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:52Z","lastTransitionTime":"2025-12-08T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.882396 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:52 crc kubenswrapper[4745]: E1208 00:08:52.882590 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.882889 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:52 crc kubenswrapper[4745]: E1208 00:08:52.883918 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.923449 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.923515 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.923536 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.923565 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:52 crc kubenswrapper[4745]: I1208 00:08:52.923587 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:52Z","lastTransitionTime":"2025-12-08T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.026343 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.026400 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.026422 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.026451 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.026473 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:53Z","lastTransitionTime":"2025-12-08T00:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.129751 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.129809 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.129831 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.129862 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.129883 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:53Z","lastTransitionTime":"2025-12-08T00:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.232860 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.232951 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.232977 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.233007 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.233028 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:53Z","lastTransitionTime":"2025-12-08T00:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.336065 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.336120 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.336138 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.336163 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.336181 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:53Z","lastTransitionTime":"2025-12-08T00:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.439130 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.439203 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.439223 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.439247 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.439265 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:53Z","lastTransitionTime":"2025-12-08T00:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.542031 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.542099 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.542128 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.542160 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.542185 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:53Z","lastTransitionTime":"2025-12-08T00:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.645897 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.645993 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.646016 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.646045 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.646066 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:53Z","lastTransitionTime":"2025-12-08T00:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.749016 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.749456 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.749656 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.749857 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.750042 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:53Z","lastTransitionTime":"2025-12-08T00:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.853301 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.853368 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.853390 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.853418 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.853439 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:53Z","lastTransitionTime":"2025-12-08T00:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.881918 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.881994 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:53 crc kubenswrapper[4745]: E1208 00:08:53.882153 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:53 crc kubenswrapper[4745]: E1208 00:08:53.882305 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.956329 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.956400 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.956423 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.956447 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:53 crc kubenswrapper[4745]: I1208 00:08:53.956465 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:53Z","lastTransitionTime":"2025-12-08T00:08:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.058766 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.058822 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.058839 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.058863 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.058880 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:54Z","lastTransitionTime":"2025-12-08T00:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.161866 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.162045 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.162064 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.162090 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.162111 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:54Z","lastTransitionTime":"2025-12-08T00:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.265404 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.265493 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.265520 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.265552 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.265577 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:54Z","lastTransitionTime":"2025-12-08T00:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.369434 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.369506 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.369530 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.369561 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.369582 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:54Z","lastTransitionTime":"2025-12-08T00:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.472400 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.472455 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.472475 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.472498 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.472515 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:54Z","lastTransitionTime":"2025-12-08T00:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.576292 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.576358 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.576375 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.576400 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.576421 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:54Z","lastTransitionTime":"2025-12-08T00:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.679675 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.679736 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.679754 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.679777 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.679796 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:54Z","lastTransitionTime":"2025-12-08T00:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.783016 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.783076 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.783094 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.783116 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.783134 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:54Z","lastTransitionTime":"2025-12-08T00:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.882086 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.882122 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:54 crc kubenswrapper[4745]: E1208 00:08:54.882414 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:54 crc kubenswrapper[4745]: E1208 00:08:54.882501 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.886625 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.886686 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.886705 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.886727 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.886769 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:54Z","lastTransitionTime":"2025-12-08T00:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.912717 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-fsd6v" podStartSLOduration=70.912641414 podStartE2EDuration="1m10.912641414s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:08:54.911873272 +0000 UTC m=+90.341079612" watchObservedRunningTime="2025-12-08 00:08:54.912641414 +0000 UTC m=+90.341847744" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.959780 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsqkx" podStartSLOduration=69.959750949 podStartE2EDuration="1m9.959750949s" podCreationTimestamp="2025-12-08 00:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:08:54.938728612 +0000 UTC m=+90.367934962" watchObservedRunningTime="2025-12-08 00:08:54.959750949 +0000 UTC m=+90.388957289" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.960314 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=26.960299884 podStartE2EDuration="26.960299884s" podCreationTimestamp="2025-12-08 00:08:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:08:54.959104851 +0000 UTC m=+90.388311251" watchObservedRunningTime="2025-12-08 00:08:54.960299884 +0000 UTC m=+90.389506234" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.982876 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=70.982858624 podStartE2EDuration="1m10.982858624s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:08:54.982704939 +0000 UTC 
m=+90.411911279" watchObservedRunningTime="2025-12-08 00:08:54.982858624 +0000 UTC m=+90.412064934" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.989360 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.989388 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.989396 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.989409 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:54 crc kubenswrapper[4745]: I1208 00:08:54.989420 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:54Z","lastTransitionTime":"2025-12-08T00:08:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.073694 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=68.073674959 podStartE2EDuration="1m8.073674959s" podCreationTimestamp="2025-12-08 00:07:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:08:55.071770736 +0000 UTC m=+90.500977046" watchObservedRunningTime="2025-12-08 00:08:55.073674959 +0000 UTC m=+90.502881259" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.092550 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.092578 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.092586 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.092599 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.092609 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:55Z","lastTransitionTime":"2025-12-08T00:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.115310 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-pk459" podStartSLOduration=71.115293711 podStartE2EDuration="1m11.115293711s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:08:55.114227811 +0000 UTC m=+90.543434111" watchObservedRunningTime="2025-12-08 00:08:55.115293711 +0000 UTC m=+90.544500011" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.132001 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-wdjvp" podStartSLOduration=71.131977246 podStartE2EDuration="1m11.131977246s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:08:55.13175776 +0000 UTC m=+90.560964070" watchObservedRunningTime="2025-12-08 00:08:55.131977246 +0000 UTC m=+90.561183546" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.161861 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-q7cpd" podStartSLOduration=71.16184244 podStartE2EDuration="1m11.16184244s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:08:55.14570464 +0000 UTC m=+90.574910940" watchObservedRunningTime="2025-12-08 00:08:55.16184244 +0000 UTC m=+90.591048750" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.162530 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=71.162524809 podStartE2EDuration="1m11.162524809s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:08:55.162431876 +0000 UTC m=+90.591638186" watchObservedRunningTime="2025-12-08 00:08:55.162524809 +0000 UTC m=+90.591731109" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.187982 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=37.187962619 podStartE2EDuration="37.187962619s" podCreationTimestamp="2025-12-08 00:08:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:08:55.174950056 +0000 UTC m=+90.604156356" watchObservedRunningTime="2025-12-08 00:08:55.187962619 +0000 UTC m=+90.617168919" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.194699 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.194751 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.194765 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.194782 4745 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.194793 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:55Z","lastTransitionTime":"2025-12-08T00:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.216490 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podStartSLOduration=71.216471725 podStartE2EDuration="1m11.216471725s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:08:55.215903679 +0000 UTC m=+90.645109989" watchObservedRunningTime="2025-12-08 00:08:55.216471725 +0000 UTC m=+90.645678015" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.297844 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.297887 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.297896 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.297912 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.297944 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:55Z","lastTransitionTime":"2025-12-08T00:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.401080 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.401135 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.401152 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.401177 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.401195 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:55Z","lastTransitionTime":"2025-12-08T00:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.505976 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.506054 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.506076 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.506105 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.506129 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:55Z","lastTransitionTime":"2025-12-08T00:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.610460 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.610552 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.610586 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.610618 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.610637 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:55Z","lastTransitionTime":"2025-12-08T00:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.732982 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.733050 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.733067 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.733095 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.733113 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:55Z","lastTransitionTime":"2025-12-08T00:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.836513 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.836614 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.836634 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.836660 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.836681 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:55Z","lastTransitionTime":"2025-12-08T00:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.882440 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.882561 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:55 crc kubenswrapper[4745]: E1208 00:08:55.882637 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:55 crc kubenswrapper[4745]: E1208 00:08:55.882746 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.939671 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.939832 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.939858 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.939896 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:55 crc kubenswrapper[4745]: I1208 00:08:55.939921 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:55Z","lastTransitionTime":"2025-12-08T00:08:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.043134 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.043188 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.043213 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.043242 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.043263 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:56Z","lastTransitionTime":"2025-12-08T00:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.147393 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.147486 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.147511 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.147551 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.147580 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:56Z","lastTransitionTime":"2025-12-08T00:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.251190 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.251255 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.251272 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.251296 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.251313 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:56Z","lastTransitionTime":"2025-12-08T00:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.354301 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.354368 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.354388 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.354412 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.354432 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:56Z","lastTransitionTime":"2025-12-08T00:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.457317 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.457487 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.457518 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.457551 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.457575 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:56Z","lastTransitionTime":"2025-12-08T00:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.562189 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.562252 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.562268 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.562297 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.562312 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:56Z","lastTransitionTime":"2025-12-08T00:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.665353 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.665394 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.665405 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.665422 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.665438 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:56Z","lastTransitionTime":"2025-12-08T00:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.768429 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.768492 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.768508 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.768532 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.768550 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:56Z","lastTransitionTime":"2025-12-08T00:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.871608 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.871674 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.871692 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.871718 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.871747 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:56Z","lastTransitionTime":"2025-12-08T00:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.882615 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.882914 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:56 crc kubenswrapper[4745]: E1208 00:08:56.883069 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:56 crc kubenswrapper[4745]: E1208 00:08:56.883172 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.974897 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.974984 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.975004 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.975030 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:56 crc kubenswrapper[4745]: I1208 00:08:56.975051 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:56Z","lastTransitionTime":"2025-12-08T00:08:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.078084 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.078162 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.078179 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.078204 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.078220 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:57Z","lastTransitionTime":"2025-12-08T00:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.181782 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.181843 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.181861 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.181884 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.181901 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:57Z","lastTransitionTime":"2025-12-08T00:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.285415 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.285515 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.285541 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.285577 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.285600 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:57Z","lastTransitionTime":"2025-12-08T00:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.388721 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.388779 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.388795 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.388822 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.388845 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:57Z","lastTransitionTime":"2025-12-08T00:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.491612 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.491675 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.491693 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.491715 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.491734 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:57Z","lastTransitionTime":"2025-12-08T00:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.595376 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.595444 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.595461 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.595487 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.595509 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:57Z","lastTransitionTime":"2025-12-08T00:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.698402 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.698463 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.698481 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.698505 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.698522 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:57Z","lastTransitionTime":"2025-12-08T00:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.801502 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.801570 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.801588 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.801612 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.801630 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:57Z","lastTransitionTime":"2025-12-08T00:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.882266 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.882378 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:57 crc kubenswrapper[4745]: E1208 00:08:57.882479 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:08:57 crc kubenswrapper[4745]: E1208 00:08:57.882583 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.904609 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.904683 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.904706 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.904736 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:57 crc kubenswrapper[4745]: I1208 00:08:57.904759 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:57Z","lastTransitionTime":"2025-12-08T00:08:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.008051 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.008113 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.008131 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.008156 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.008175 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:58Z","lastTransitionTime":"2025-12-08T00:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.111089 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.111200 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.111217 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.111242 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.111261 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:58Z","lastTransitionTime":"2025-12-08T00:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.215424 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.215481 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.215502 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.215530 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.215546 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:58Z","lastTransitionTime":"2025-12-08T00:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.301639 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.301709 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.301728 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.301754 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.301773 4745 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-08T00:08:58Z","lastTransitionTime":"2025-12-08T00:08:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.365585 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq"] Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.366180 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.369253 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.370030 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.370143 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.372124 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.398485 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/70c1725b-11f4-49a2-bef5-ceeabf40fccd-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-6gwhq\" (UID: \"70c1725b-11f4-49a2-bef5-ceeabf40fccd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.398585 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/70c1725b-11f4-49a2-bef5-ceeabf40fccd-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-6gwhq\" (UID: \"70c1725b-11f4-49a2-bef5-ceeabf40fccd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.398862 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/70c1725b-11f4-49a2-bef5-ceeabf40fccd-service-ca\") pod \"cluster-version-operator-5c965bbfc6-6gwhq\" (UID: \"70c1725b-11f4-49a2-bef5-ceeabf40fccd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.399071 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/70c1725b-11f4-49a2-bef5-ceeabf40fccd-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-6gwhq\" (UID: \"70c1725b-11f4-49a2-bef5-ceeabf40fccd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.399217 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70c1725b-11f4-49a2-bef5-ceeabf40fccd-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-6gwhq\" (UID: \"70c1725b-11f4-49a2-bef5-ceeabf40fccd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.500821 4745 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/70c1725b-11f4-49a2-bef5-ceeabf40fccd-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-6gwhq\" (UID: \"70c1725b-11f4-49a2-bef5-ceeabf40fccd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.500877 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/70c1725b-11f4-49a2-bef5-ceeabf40fccd-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-6gwhq\" (UID: \"70c1725b-11f4-49a2-bef5-ceeabf40fccd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.501010 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/70c1725b-11f4-49a2-bef5-ceeabf40fccd-service-ca\") pod \"cluster-version-operator-5c965bbfc6-6gwhq\" (UID: \"70c1725b-11f4-49a2-bef5-ceeabf40fccd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.501051 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/70c1725b-11f4-49a2-bef5-ceeabf40fccd-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-6gwhq\" (UID: \"70c1725b-11f4-49a2-bef5-ceeabf40fccd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.501118 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70c1725b-11f4-49a2-bef5-ceeabf40fccd-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-6gwhq\" (UID: \"70c1725b-11f4-49a2-bef5-ceeabf40fccd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.501134 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/70c1725b-11f4-49a2-bef5-ceeabf40fccd-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-6gwhq\" (UID: \"70c1725b-11f4-49a2-bef5-ceeabf40fccd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.501568 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/70c1725b-11f4-49a2-bef5-ceeabf40fccd-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-6gwhq\" (UID: \"70c1725b-11f4-49a2-bef5-ceeabf40fccd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.502762 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/70c1725b-11f4-49a2-bef5-ceeabf40fccd-service-ca\") pod \"cluster-version-operator-5c965bbfc6-6gwhq\" (UID: \"70c1725b-11f4-49a2-bef5-ceeabf40fccd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.510120 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/70c1725b-11f4-49a2-bef5-ceeabf40fccd-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-6gwhq\" (UID: \"70c1725b-11f4-49a2-bef5-ceeabf40fccd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.533259 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/70c1725b-11f4-49a2-bef5-ceeabf40fccd-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-6gwhq\" (UID: \"70c1725b-11f4-49a2-bef5-ceeabf40fccd\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.687870 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" Dec 08 00:08:58 crc kubenswrapper[4745]: W1208 00:08:58.710798 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70c1725b_11f4_49a2_bef5_ceeabf40fccd.slice/crio-74b9cb6fe3caa1028b702648ebcda61d20683f57f9678ca139b76032f3eb7204 WatchSource:0}: Error finding container 74b9cb6fe3caa1028b702648ebcda61d20683f57f9678ca139b76032f3eb7204: Status 404 returned error can't find the container with id 74b9cb6fe3caa1028b702648ebcda61d20683f57f9678ca139b76032f3eb7204 Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.882505 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.882698 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:08:58 crc kubenswrapper[4745]: E1208 00:08:58.882818 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:08:58 crc kubenswrapper[4745]: E1208 00:08:58.883150 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:08:58 crc kubenswrapper[4745]: I1208 00:08:58.885098 4745 scope.go:117] "RemoveContainer" containerID="1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104" Dec 08 00:08:58 crc kubenswrapper[4745]: E1208 00:08:58.885480 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" Dec 08 00:08:59 crc kubenswrapper[4745]: I1208 00:08:59.436540 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" event={"ID":"70c1725b-11f4-49a2-bef5-ceeabf40fccd","Type":"ContainerStarted","Data":"18fa7e2b9d9680986d9db57f391d7352b9b29d120827c8d39001795b53e69d98"} Dec 08 00:08:59 crc kubenswrapper[4745]: I1208 00:08:59.436611 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" event={"ID":"70c1725b-11f4-49a2-bef5-ceeabf40fccd","Type":"ContainerStarted","Data":"74b9cb6fe3caa1028b702648ebcda61d20683f57f9678ca139b76032f3eb7204"} Dec 08 00:08:59 crc kubenswrapper[4745]: I1208 00:08:59.881903 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:08:59 crc kubenswrapper[4745]: E1208 00:08:59.882033 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:08:59 crc kubenswrapper[4745]: I1208 00:08:59.882168 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:08:59 crc kubenswrapper[4745]: E1208 00:08:59.882277 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:00 crc kubenswrapper[4745]: I1208 00:09:00.883392 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:00 crc kubenswrapper[4745]: E1208 00:09:00.883467 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:00 crc kubenswrapper[4745]: I1208 00:09:00.883609 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:00 crc kubenswrapper[4745]: E1208 00:09:00.883651 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:01 crc kubenswrapper[4745]: I1208 00:09:01.882342 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:01 crc kubenswrapper[4745]: I1208 00:09:01.882377 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:01 crc kubenswrapper[4745]: E1208 00:09:01.882506 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:01 crc kubenswrapper[4745]: E1208 00:09:01.882618 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:02 crc kubenswrapper[4745]: I1208 00:09:02.881762 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:02 crc kubenswrapper[4745]: I1208 00:09:02.881837 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:02 crc kubenswrapper[4745]: E1208 00:09:02.882052 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:02 crc kubenswrapper[4745]: E1208 00:09:02.882143 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:03 crc kubenswrapper[4745]: I1208 00:09:03.882423 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:03 crc kubenswrapper[4745]: I1208 00:09:03.882453 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:03 crc kubenswrapper[4745]: E1208 00:09:03.882594 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:03 crc kubenswrapper[4745]: E1208 00:09:03.882845 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:04 crc kubenswrapper[4745]: I1208 00:09:04.364989 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs\") pod \"network-metrics-daemon-zpkz9\" (UID: \"c402d875-2477-4bda-872a-da631b5b5ff7\") " pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:04 crc kubenswrapper[4745]: E1208 00:09:04.365141 4745 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 00:09:04 crc kubenswrapper[4745]: E1208 00:09:04.365198 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs podName:c402d875-2477-4bda-872a-da631b5b5ff7 nodeName:}" failed. No retries permitted until 2025-12-08 00:10:08.365180655 +0000 UTC m=+163.794386955 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs") pod "network-metrics-daemon-zpkz9" (UID: "c402d875-2477-4bda-872a-da631b5b5ff7") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 08 00:09:04 crc kubenswrapper[4745]: I1208 00:09:04.882390 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:04 crc kubenswrapper[4745]: I1208 00:09:04.882416 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:04 crc kubenswrapper[4745]: E1208 00:09:04.884028 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:04 crc kubenswrapper[4745]: E1208 00:09:04.884261 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:05 crc kubenswrapper[4745]: I1208 00:09:05.881609 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:05 crc kubenswrapper[4745]: I1208 00:09:05.881648 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:05 crc kubenswrapper[4745]: E1208 00:09:05.882209 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:05 crc kubenswrapper[4745]: E1208 00:09:05.882438 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:06 crc kubenswrapper[4745]: I1208 00:09:06.882402 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:06 crc kubenswrapper[4745]: I1208 00:09:06.882402 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:06 crc kubenswrapper[4745]: E1208 00:09:06.882639 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:06 crc kubenswrapper[4745]: E1208 00:09:06.882709 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:07 crc kubenswrapper[4745]: I1208 00:09:07.882575 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:07 crc kubenswrapper[4745]: I1208 00:09:07.882607 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:07 crc kubenswrapper[4745]: E1208 00:09:07.882764 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:07 crc kubenswrapper[4745]: E1208 00:09:07.883253 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:08 crc kubenswrapper[4745]: I1208 00:09:08.882219 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:08 crc kubenswrapper[4745]: I1208 00:09:08.882381 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:08 crc kubenswrapper[4745]: E1208 00:09:08.882512 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:08 crc kubenswrapper[4745]: E1208 00:09:08.882594 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:09 crc kubenswrapper[4745]: I1208 00:09:09.882319 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:09 crc kubenswrapper[4745]: I1208 00:09:09.882334 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:09 crc kubenswrapper[4745]: E1208 00:09:09.882535 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:09 crc kubenswrapper[4745]: E1208 00:09:09.882616 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:10 crc kubenswrapper[4745]: I1208 00:09:10.882017 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:10 crc kubenswrapper[4745]: E1208 00:09:10.882210 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:10 crc kubenswrapper[4745]: I1208 00:09:10.882325 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:10 crc kubenswrapper[4745]: E1208 00:09:10.882621 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:11 crc kubenswrapper[4745]: I1208 00:09:11.882319 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:11 crc kubenswrapper[4745]: I1208 00:09:11.882404 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:11 crc kubenswrapper[4745]: E1208 00:09:11.882501 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:11 crc kubenswrapper[4745]: E1208 00:09:11.882590 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:12 crc kubenswrapper[4745]: I1208 00:09:12.882625 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:12 crc kubenswrapper[4745]: E1208 00:09:12.883358 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:12 crc kubenswrapper[4745]: I1208 00:09:12.882671 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:12 crc kubenswrapper[4745]: E1208 00:09:12.883562 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:13 crc kubenswrapper[4745]: I1208 00:09:13.882690 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:13 crc kubenswrapper[4745]: I1208 00:09:13.882724 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:13 crc kubenswrapper[4745]: E1208 00:09:13.882877 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:13 crc kubenswrapper[4745]: E1208 00:09:13.883579 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:13 crc kubenswrapper[4745]: I1208 00:09:13.883994 4745 scope.go:117] "RemoveContainer" containerID="1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104" Dec 08 00:09:13 crc kubenswrapper[4745]: E1208 00:09:13.884233 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" Dec 08 00:09:14 crc kubenswrapper[4745]: I1208 00:09:14.882379 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:14 crc kubenswrapper[4745]: I1208 00:09:14.882647 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:14 crc kubenswrapper[4745]: E1208 00:09:14.884460 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:14 crc kubenswrapper[4745]: E1208 00:09:14.884691 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:15 crc kubenswrapper[4745]: I1208 00:09:15.881985 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:15 crc kubenswrapper[4745]: E1208 00:09:15.882182 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:15 crc kubenswrapper[4745]: I1208 00:09:15.881985 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:15 crc kubenswrapper[4745]: E1208 00:09:15.882627 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:16 crc kubenswrapper[4745]: I1208 00:09:16.881886 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:16 crc kubenswrapper[4745]: I1208 00:09:16.882330 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:16 crc kubenswrapper[4745]: E1208 00:09:16.882491 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:16 crc kubenswrapper[4745]: E1208 00:09:16.882491 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:17 crc kubenswrapper[4745]: I1208 00:09:17.882668 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:17 crc kubenswrapper[4745]: I1208 00:09:17.882717 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:17 crc kubenswrapper[4745]: E1208 00:09:17.882955 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:17 crc kubenswrapper[4745]: E1208 00:09:17.883089 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:18 crc kubenswrapper[4745]: I1208 00:09:18.881681 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:18 crc kubenswrapper[4745]: I1208 00:09:18.881681 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:18 crc kubenswrapper[4745]: E1208 00:09:18.881955 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:18 crc kubenswrapper[4745]: E1208 00:09:18.882105 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:19 crc kubenswrapper[4745]: I1208 00:09:19.794484 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pk459_73d47ce8-04b5-4dba-aa14-655581a103a8/kube-multus/1.log" Dec 08 00:09:19 crc kubenswrapper[4745]: I1208 00:09:19.795631 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pk459_73d47ce8-04b5-4dba-aa14-655581a103a8/kube-multus/0.log" Dec 08 00:09:19 crc kubenswrapper[4745]: I1208 00:09:19.795731 4745 generic.go:334] "Generic (PLEG): container finished" podID="73d47ce8-04b5-4dba-aa14-655581a103a8" containerID="aa379390948ecfc2220e8bb11d770d2faf0844a35bbe0684954d611d567a4a88" exitCode=1 Dec 08 00:09:19 crc kubenswrapper[4745]: I1208 00:09:19.795797 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pk459" event={"ID":"73d47ce8-04b5-4dba-aa14-655581a103a8","Type":"ContainerDied","Data":"aa379390948ecfc2220e8bb11d770d2faf0844a35bbe0684954d611d567a4a88"} Dec 08 00:09:19 crc kubenswrapper[4745]: I1208 00:09:19.795867 4745 scope.go:117] "RemoveContainer" containerID="c19f3956562f169c6bd6cfa7dafd01264ed313898b3de7faa272bd5e71b9298c" Dec 08 00:09:19 crc kubenswrapper[4745]: I1208 00:09:19.796910 4745 scope.go:117] "RemoveContainer" containerID="aa379390948ecfc2220e8bb11d770d2faf0844a35bbe0684954d611d567a4a88" Dec 08 00:09:19 crc kubenswrapper[4745]: E1208 00:09:19.797304 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-pk459_openshift-multus(73d47ce8-04b5-4dba-aa14-655581a103a8)\"" pod="openshift-multus/multus-pk459" podUID="73d47ce8-04b5-4dba-aa14-655581a103a8" Dec 08 00:09:19 crc kubenswrapper[4745]: I1208 00:09:19.830188 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6gwhq" podStartSLOduration=95.830158425 podStartE2EDuration="1m35.830158425s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:00.749388983 +0000 UTC m=+96.178595323" watchObservedRunningTime="2025-12-08 00:09:19.830158425 +0000 UTC m=+115.259364755" Dec 08 00:09:19 crc kubenswrapper[4745]: I1208 00:09:19.882410 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:19 crc kubenswrapper[4745]: I1208 00:09:19.882411 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:19 crc kubenswrapper[4745]: E1208 00:09:19.882596 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:19 crc kubenswrapper[4745]: E1208 00:09:19.882712 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:20 crc kubenswrapper[4745]: I1208 00:09:20.801153 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pk459_73d47ce8-04b5-4dba-aa14-655581a103a8/kube-multus/1.log" Dec 08 00:09:20 crc kubenswrapper[4745]: I1208 00:09:20.881887 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:20 crc kubenswrapper[4745]: I1208 00:09:20.881886 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:20 crc kubenswrapper[4745]: E1208 00:09:20.882104 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:20 crc kubenswrapper[4745]: E1208 00:09:20.882299 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:21 crc kubenswrapper[4745]: I1208 00:09:21.881641 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:21 crc kubenswrapper[4745]: E1208 00:09:21.881868 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:21 crc kubenswrapper[4745]: I1208 00:09:21.881658 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:21 crc kubenswrapper[4745]: E1208 00:09:21.882308 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:22 crc kubenswrapper[4745]: I1208 00:09:22.881805 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:22 crc kubenswrapper[4745]: I1208 00:09:22.881859 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:22 crc kubenswrapper[4745]: E1208 00:09:22.882058 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:22 crc kubenswrapper[4745]: E1208 00:09:22.882147 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:23 crc kubenswrapper[4745]: I1208 00:09:23.881704 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:23 crc kubenswrapper[4745]: E1208 00:09:23.881987 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:23 crc kubenswrapper[4745]: I1208 00:09:23.882174 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:23 crc kubenswrapper[4745]: E1208 00:09:23.882333 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:24 crc kubenswrapper[4745]: E1208 00:09:24.819244 4745 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 08 00:09:24 crc kubenswrapper[4745]: I1208 00:09:24.882258 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:24 crc kubenswrapper[4745]: I1208 00:09:24.882341 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:24 crc kubenswrapper[4745]: E1208 00:09:24.885009 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:24 crc kubenswrapper[4745]: E1208 00:09:24.885198 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:24 crc kubenswrapper[4745]: E1208 00:09:24.977977 4745 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 00:09:25 crc kubenswrapper[4745]: I1208 00:09:25.882149 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:25 crc kubenswrapper[4745]: I1208 00:09:25.882206 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:25 crc kubenswrapper[4745]: E1208 00:09:25.882342 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:25 crc kubenswrapper[4745]: E1208 00:09:25.882444 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:26 crc kubenswrapper[4745]: I1208 00:09:26.881766 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:26 crc kubenswrapper[4745]: I1208 00:09:26.881793 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:26 crc kubenswrapper[4745]: E1208 00:09:26.882006 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:26 crc kubenswrapper[4745]: E1208 00:09:26.882175 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:26 crc kubenswrapper[4745]: I1208 00:09:26.883366 4745 scope.go:117] "RemoveContainer" containerID="1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104" Dec 08 00:09:26 crc kubenswrapper[4745]: E1208 00:09:26.883753 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5c9xn_openshift-ovn-kubernetes(1fc4e04e-a6e2-4897-9549-d7517e1ac92b)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" Dec 08 00:09:27 crc kubenswrapper[4745]: I1208 00:09:27.881603 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:27 crc kubenswrapper[4745]: I1208 00:09:27.881612 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:27 crc kubenswrapper[4745]: E1208 00:09:27.881771 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:27 crc kubenswrapper[4745]: E1208 00:09:27.881912 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:28 crc kubenswrapper[4745]: I1208 00:09:28.882315 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:28 crc kubenswrapper[4745]: I1208 00:09:28.882451 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:28 crc kubenswrapper[4745]: E1208 00:09:28.882529 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:28 crc kubenswrapper[4745]: E1208 00:09:28.882639 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:29 crc kubenswrapper[4745]: I1208 00:09:29.882490 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:29 crc kubenswrapper[4745]: I1208 00:09:29.882582 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:29 crc kubenswrapper[4745]: E1208 00:09:29.882675 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:29 crc kubenswrapper[4745]: E1208 00:09:29.882769 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:29 crc kubenswrapper[4745]: E1208 00:09:29.979273 4745 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 00:09:30 crc kubenswrapper[4745]: I1208 00:09:30.881871 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:30 crc kubenswrapper[4745]: I1208 00:09:30.881909 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:30 crc kubenswrapper[4745]: E1208 00:09:30.882130 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:30 crc kubenswrapper[4745]: E1208 00:09:30.882250 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:31 crc kubenswrapper[4745]: I1208 00:09:31.882468 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:31 crc kubenswrapper[4745]: I1208 00:09:31.882565 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:31 crc kubenswrapper[4745]: E1208 00:09:31.882615 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:31 crc kubenswrapper[4745]: E1208 00:09:31.882729 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:32 crc kubenswrapper[4745]: I1208 00:09:32.881672 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:32 crc kubenswrapper[4745]: I1208 00:09:32.881686 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:32 crc kubenswrapper[4745]: E1208 00:09:32.881868 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:32 crc kubenswrapper[4745]: E1208 00:09:32.881979 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:33 crc kubenswrapper[4745]: I1208 00:09:33.881583 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:33 crc kubenswrapper[4745]: E1208 00:09:33.881697 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:33 crc kubenswrapper[4745]: I1208 00:09:33.881795 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:33 crc kubenswrapper[4745]: E1208 00:09:33.882020 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:34 crc kubenswrapper[4745]: I1208 00:09:34.882064 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:34 crc kubenswrapper[4745]: I1208 00:09:34.882997 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:34 crc kubenswrapper[4745]: E1208 00:09:34.883878 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:34 crc kubenswrapper[4745]: E1208 00:09:34.884110 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:34 crc kubenswrapper[4745]: I1208 00:09:34.884735 4745 scope.go:117] "RemoveContainer" containerID="aa379390948ecfc2220e8bb11d770d2faf0844a35bbe0684954d611d567a4a88" Dec 08 00:09:34 crc kubenswrapper[4745]: E1208 00:09:34.979958 4745 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 00:09:35 crc kubenswrapper[4745]: I1208 00:09:35.860117 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pk459_73d47ce8-04b5-4dba-aa14-655581a103a8/kube-multus/1.log" Dec 08 00:09:35 crc kubenswrapper[4745]: I1208 00:09:35.860190 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pk459" event={"ID":"73d47ce8-04b5-4dba-aa14-655581a103a8","Type":"ContainerStarted","Data":"84ff2184bdcecf225a8ccfd25d0b2e058a82360914b45b7dfc646209f975032f"} Dec 08 00:09:35 crc kubenswrapper[4745]: I1208 00:09:35.885730 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:35 crc kubenswrapper[4745]: E1208 00:09:35.885962 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:35 crc kubenswrapper[4745]: I1208 00:09:35.886261 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:35 crc kubenswrapper[4745]: E1208 00:09:35.886356 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:36 crc kubenswrapper[4745]: I1208 00:09:36.882429 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:36 crc kubenswrapper[4745]: I1208 00:09:36.882475 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:36 crc kubenswrapper[4745]: E1208 00:09:36.882633 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:36 crc kubenswrapper[4745]: E1208 00:09:36.882837 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:37 crc kubenswrapper[4745]: I1208 00:09:37.882232 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:37 crc kubenswrapper[4745]: I1208 00:09:37.882255 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:37 crc kubenswrapper[4745]: E1208 00:09:37.882402 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:37 crc kubenswrapper[4745]: E1208 00:09:37.882487 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:38 crc kubenswrapper[4745]: I1208 00:09:38.881668 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:38 crc kubenswrapper[4745]: E1208 00:09:38.881804 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:38 crc kubenswrapper[4745]: I1208 00:09:38.881984 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:38 crc kubenswrapper[4745]: E1208 00:09:38.882210 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:39 crc kubenswrapper[4745]: I1208 00:09:39.882196 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:39 crc kubenswrapper[4745]: I1208 00:09:39.882218 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:39 crc kubenswrapper[4745]: E1208 00:09:39.882356 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:39 crc kubenswrapper[4745]: E1208 00:09:39.882583 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:39 crc kubenswrapper[4745]: I1208 00:09:39.883351 4745 scope.go:117] "RemoveContainer" containerID="1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104" Dec 08 00:09:39 crc kubenswrapper[4745]: E1208 00:09:39.980890 4745 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 08 00:09:40 crc kubenswrapper[4745]: I1208 00:09:40.869096 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-zpkz9"] Dec 08 00:09:40 crc kubenswrapper[4745]: I1208 00:09:40.881721 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovnkube-controller/3.log" Dec 08 00:09:40 crc kubenswrapper[4745]: I1208 00:09:40.882191 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:40 crc kubenswrapper[4745]: E1208 00:09:40.882451 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:40 crc kubenswrapper[4745]: I1208 00:09:40.882838 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:40 crc kubenswrapper[4745]: E1208 00:09:40.883054 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:40 crc kubenswrapper[4745]: I1208 00:09:40.888691 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:40 crc kubenswrapper[4745]: E1208 00:09:40.888847 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:40 crc kubenswrapper[4745]: I1208 00:09:40.889743 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerStarted","Data":"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac"} Dec 08 00:09:40 crc kubenswrapper[4745]: I1208 00:09:40.890838 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:09:40 crc kubenswrapper[4745]: I1208 00:09:40.928539 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podStartSLOduration=116.928519337 podStartE2EDuration="1m56.928519337s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:40.927477125 +0000 UTC m=+136.356683435" watchObservedRunningTime="2025-12-08 00:09:40.928519337 +0000 UTC m=+136.357725637" Dec 08 00:09:41 crc kubenswrapper[4745]: I1208 00:09:41.881606 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:41 crc kubenswrapper[4745]: E1208 00:09:41.881728 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:42 crc kubenswrapper[4745]: I1208 00:09:42.881897 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:42 crc kubenswrapper[4745]: I1208 00:09:42.882132 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:42 crc kubenswrapper[4745]: E1208 00:09:42.882284 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:42 crc kubenswrapper[4745]: E1208 00:09:42.882133 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:42 crc kubenswrapper[4745]: I1208 00:09:42.881915 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:42 crc kubenswrapper[4745]: E1208 00:09:42.882448 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:43 crc kubenswrapper[4745]: I1208 00:09:43.882479 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:43 crc kubenswrapper[4745]: E1208 00:09:43.882683 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 08 00:09:44 crc kubenswrapper[4745]: I1208 00:09:44.890691 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:44 crc kubenswrapper[4745]: I1208 00:09:44.890721 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:44 crc kubenswrapper[4745]: E1208 00:09:44.891849 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 08 00:09:44 crc kubenswrapper[4745]: E1208 00:09:44.892101 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-zpkz9" podUID="c402d875-2477-4bda-872a-da631b5b5ff7" Dec 08 00:09:44 crc kubenswrapper[4745]: I1208 00:09:44.890746 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:44 crc kubenswrapper[4745]: E1208 00:09:44.892344 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 08 00:09:45 crc kubenswrapper[4745]: I1208 00:09:45.882529 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:45 crc kubenswrapper[4745]: I1208 00:09:45.884304 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 08 00:09:45 crc kubenswrapper[4745]: I1208 00:09:45.886005 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 08 00:09:46 crc kubenswrapper[4745]: I1208 00:09:46.881780 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:46 crc kubenswrapper[4745]: I1208 00:09:46.881780 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:09:46 crc kubenswrapper[4745]: I1208 00:09:46.881881 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:46 crc kubenswrapper[4745]: I1208 00:09:46.884626 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 08 00:09:46 crc kubenswrapper[4745]: I1208 00:09:46.886295 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 08 00:09:46 crc kubenswrapper[4745]: I1208 00:09:46.886477 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 08 00:09:46 crc kubenswrapper[4745]: I1208 00:09:46.886793 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 08 00:09:48 crc kubenswrapper[4745]: I1208 00:09:48.924148 4745 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 08 00:09:48 crc kubenswrapper[4745]: I1208 00:09:48.984401 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-lx4dj"] Dec 08 00:09:48 crc kubenswrapper[4745]: I1208 00:09:48.985505 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:48 crc kubenswrapper[4745]: I1208 00:09:48.988812 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7xchq"] Dec 08 00:09:48 crc kubenswrapper[4745]: I1208 00:09:48.989645 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:48 crc kubenswrapper[4745]: I1208 00:09:48.991654 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dktn5"] Dec 08 00:09:48 crc kubenswrapper[4745]: I1208 00:09:48.993524 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" Dec 08 00:09:48 crc kubenswrapper[4745]: I1208 00:09:48.994547 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r"] Dec 08 00:09:48 crc kubenswrapper[4745]: I1208 00:09:48.995661 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:48.998195 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:48.999113 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:48.999342 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.000049 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.000355 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.000453 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.000859 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.001535 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.002161 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.002483 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.004824 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.005345 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.006053 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.008680 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.013883 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.015321 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.015827 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.021391 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.021721 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.021991 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.022217 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.022636 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.023214 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.023370 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.023687 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.023238 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.022728 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.023261 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.023332 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.023476 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.023537 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.023571 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.023577 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.023626 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.023644 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.024550 4745 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-route-controller-manager"/"serving-cert" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.024869 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-b9kth"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.025098 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.025266 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.025335 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.025438 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.025583 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.025734 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.025181 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.025993 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.028446 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-k98nn"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.029275 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.029328 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.035364 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.035548 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.035667 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.035868 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.036035 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.036369 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.036538 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.036716 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.039738 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.041612 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.041697 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.066334 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.069445 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-pruner-29419200-2mqnz"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.070063 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.070168 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29419200-2mqnz" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.070091 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-8cg7l"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.072221 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.072303 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.073218 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.073399 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.073605 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.073616 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.073720 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.073846 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.073998 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.074145 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.074581 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.074635 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.074587 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.078849 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.078947 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.079128 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.079171 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.083310 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-j77jf"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.091107 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.091409 
4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.091615 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.091743 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"pruner-dockercfg-p7bcw" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.092200 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.092259 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.092350 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.092433 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"serviceca" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.093190 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.093615 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.093635 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.094000 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-j77jf" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.095343 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4qqnf"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.095747 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-b2dbs"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.095962 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4qqnf" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.096031 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-b2dbs" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.096821 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.097289 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.104485 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.104790 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.106268 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.106368 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.106495 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.106615 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.107130 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.107337 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.107488 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.107630 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.107776 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108002 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108239 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108327 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vxh4\" (UniqueName: \"kubernetes.io/projected/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-kube-api-access-9vxh4\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108361 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108383 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/dbea8073-c662-4a72-871b-0abf65d79bc7-machine-approver-tls\") pod \"machine-approver-56656f9798-rkq5r\" (UID: \"dbea8073-c662-4a72-871b-0abf65d79bc7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108403 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108423 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-serving-cert\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108459 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/64fa4ddd-3dbd-4910-b8f8-dba1bb97b963-available-featuregates\") pod \"openshift-config-operator-7777fb866f-q5k5x\" (UID: \"64fa4ddd-3dbd-4910-b8f8-dba1bb97b963\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108476 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqvw2\" (UniqueName: \"kubernetes.io/projected/dbea8073-c662-4a72-871b-0abf65d79bc7-kube-api-access-jqvw2\") pod \"machine-approver-56656f9798-rkq5r\" (UID: \"dbea8073-c662-4a72-871b-0abf65d79bc7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108494 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cd1f7237-e796-4af1-b911-b15b54030e38-etcd-client\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108509 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108525 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108542 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnwqm\" (UniqueName: \"kubernetes.io/projected/64fa4ddd-3dbd-4910-b8f8-dba1bb97b963-kube-api-access-fnwqm\") pod \"openshift-config-operator-7777fb866f-q5k5x\" (UID: \"64fa4ddd-3dbd-4910-b8f8-dba1bb97b963\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108558 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/cd1f7237-e796-4af1-b911-b15b54030e38-image-import-ca\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108582 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-etcd-client\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108597 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/dbea8073-c662-4a72-871b-0abf65d79bc7-auth-proxy-config\") pod \"machine-approver-56656f9798-rkq5r\" (UID: \"dbea8073-c662-4a72-871b-0abf65d79bc7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108610 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/cd1f7237-e796-4af1-b911-b15b54030e38-etcd-serving-ca\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108628 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/cd1f7237-e796-4af1-b911-b15b54030e38-encryption-config\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108642 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/cd1f7237-e796-4af1-b911-b15b54030e38-audit\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108676 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2dnz\" (UniqueName: 
\"kubernetes.io/projected/623b96d9-884a-4dea-a6a9-301f384d8666-kube-api-access-v2dnz\") pod \"route-controller-manager-6576b87f9c-sqqdd\" (UID: \"623b96d9-884a-4dea-a6a9-301f384d8666\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108692 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/623b96d9-884a-4dea-a6a9-301f384d8666-config\") pod \"route-controller-manager-6576b87f9c-sqqdd\" (UID: \"623b96d9-884a-4dea-a6a9-301f384d8666\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108707 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/cd1f7237-e796-4af1-b911-b15b54030e38-node-pullsecrets\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108720 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd1f7237-e796-4af1-b911-b15b54030e38-serving-cert\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108736 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-config\") pod \"controller-manager-879f6c89f-7xchq\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108752 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cd1f7237-e796-4af1-b911-b15b54030e38-audit-dir\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108770 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108785 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/623b96d9-884a-4dea-a6a9-301f384d8666-serving-cert\") pod \"route-controller-manager-6576b87f9c-sqqdd\" (UID: \"623b96d9-884a-4dea-a6a9-301f384d8666\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108815 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzvcr\" (UniqueName: 
\"kubernetes.io/projected/1947b883-d3db-45c3-951b-4025e2517403-kube-api-access-dzvcr\") pod \"openshift-apiserver-operator-796bbdcf4f-6s6x7\" (UID: \"1947b883-d3db-45c3-951b-4025e2517403\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108830 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nx6jb\" (UniqueName: \"kubernetes.io/projected/82d71418-4a49-437a-8429-1f0569d205b0-kube-api-access-nx6jb\") pod \"controller-manager-879f6c89f-7xchq\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108846 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkr94\" (UniqueName: \"kubernetes.io/projected/cd1f7237-e796-4af1-b911-b15b54030e38-kube-api-access-vkr94\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108870 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-client-ca\") pod \"controller-manager-879f6c89f-7xchq\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108885 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-encryption-config\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108900 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108917 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-audit-dir\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108949 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bbcc608-06c7-4af7-8e13-590aa487913a-serving-cert\") pod \"authentication-operator-69f744f599-k98nn\" (UID: \"1bbcc608-06c7-4af7-8e13-590aa487913a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108966 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1947b883-d3db-45c3-951b-4025e2517403-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6s6x7\" (UID: \"1947b883-d3db-45c3-951b-4025e2517403\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108984 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/d4a755dd-bcbd-4cf0-a396-673809d92250-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dktn5\" (UID: \"d4a755dd-bcbd-4cf0-a396-673809d92250\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.108998 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109013 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-audit-policies\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109028 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bbcc608-06c7-4af7-8e13-590aa487913a-config\") pod \"authentication-operator-69f744f599-k98nn\" (UID: \"1bbcc608-06c7-4af7-8e13-590aa487913a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109060 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cd1f7237-e796-4af1-b911-b15b54030e38-trusted-ca-bundle\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109077 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1947b883-d3db-45c3-951b-4025e2517403-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6s6x7\" (UID: \"1947b883-d3db-45c3-951b-4025e2517403\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109091 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-audit-policies\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109108 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjgxb\" (UniqueName: 
\"kubernetes.io/projected/1bbcc608-06c7-4af7-8e13-590aa487913a-kube-api-access-tjgxb\") pod \"authentication-operator-69f744f599-k98nn\" (UID: \"1bbcc608-06c7-4af7-8e13-590aa487913a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109115 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109145 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4a755dd-bcbd-4cf0-a396-673809d92250-config\") pod \"machine-api-operator-5694c8668f-dktn5\" (UID: \"d4a755dd-bcbd-4cf0-a396-673809d92250\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109182 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bbcc608-06c7-4af7-8e13-590aa487913a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-k98nn\" (UID: \"1bbcc608-06c7-4af7-8e13-590aa487913a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109205 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7xchq\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109225 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsc9q\" (UniqueName: \"kubernetes.io/projected/d4a755dd-bcbd-4cf0-a396-673809d92250-kube-api-access-qsc9q\") pod \"machine-api-operator-5694c8668f-dktn5\" (UID: \"d4a755dd-bcbd-4cf0-a396-673809d92250\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109247 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/623b96d9-884a-4dea-a6a9-301f384d8666-client-ca\") pod \"route-controller-manager-6576b87f9c-sqqdd\" (UID: \"623b96d9-884a-4dea-a6a9-301f384d8666\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109271 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bbcc608-06c7-4af7-8e13-590aa487913a-service-ca-bundle\") pod \"authentication-operator-69f744f599-k98nn\" (UID: \"1bbcc608-06c7-4af7-8e13-590aa487913a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109293 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6a3eaabd-8f61-487b-83f8-e458dfa24673-audit-dir\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109314 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109333 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbea8073-c662-4a72-871b-0abf65d79bc7-config\") pod \"machine-approver-56656f9798-rkq5r\" (UID: \"dbea8073-c662-4a72-871b-0abf65d79bc7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109351 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd1f7237-e796-4af1-b911-b15b54030e38-config\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109373 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109394 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109414 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mfqw\" (UniqueName: \"kubernetes.io/projected/6a3eaabd-8f61-487b-83f8-e458dfa24673-kube-api-access-8mfqw\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109435 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82d71418-4a49-437a-8429-1f0569d205b0-serving-cert\") pod \"controller-manager-879f6c89f-7xchq\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109456 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109477 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109497 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109521 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64fa4ddd-3dbd-4910-b8f8-dba1bb97b963-serving-cert\") pod \"openshift-config-operator-7777fb866f-q5k5x\" (UID: \"64fa4ddd-3dbd-4910-b8f8-dba1bb97b963\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109543 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/d4a755dd-bcbd-4cf0-a396-673809d92250-images\") pod \"machine-api-operator-5694c8668f-dktn5\" (UID: \"d4a755dd-bcbd-4cf0-a396-673809d92250\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.109862 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-wm7m5"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.110410 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l6mg7"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.110824 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.111192 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.112831 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.113049 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.107627 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.117646 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.118207 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.119049 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.119471 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.119506 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.119708 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.119758 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.127875 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.128007 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.128284 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.131088 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.134515 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.136677 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.136999 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.144253 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hq629"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.148780 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.149093 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-kp656"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.150415 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hq629" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.150706 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-kp656" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.165453 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.168133 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.170562 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.170801 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.170973 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.171329 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.171373 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-qtsfq"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.171620 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.171701 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7xchq"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.171791 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.172621 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nsh9z"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.173118 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nsh9z" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.173760 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qzc8t"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.174711 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-qzc8t" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.174966 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.175404 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.176036 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.177025 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.177768 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.177983 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dktn5"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.178401 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.179044 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.179886 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.181289 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.182196 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-b2dbs"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.183009 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-lx4dj"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.183753 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.184630 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.185550 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.186435 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-j77jf"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.187333 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hq629"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.188807 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qzc8t"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.189203 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.189201 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-k98nn"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.196782 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nsh9z"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.198853 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-b9kth"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.201423 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-8cg7l"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.202361 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4qqnf"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.203396 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29419200-2mqnz"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 
00:09:49.204225 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.206187 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-wszxv"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.207111 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wszxv" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.207121 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-cvr94"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.207762 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-cvr94" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.208092 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.208222 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.208650 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.209057 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.209842 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210001 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210013 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsc9q\" (UniqueName: \"kubernetes.io/projected/d4a755dd-bcbd-4cf0-a396-673809d92250-kube-api-access-qsc9q\") pod \"machine-api-operator-5694c8668f-dktn5\" (UID: \"d4a755dd-bcbd-4cf0-a396-673809d92250\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210043 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhmnz\" (UniqueName: \"kubernetes.io/projected/712511e1-14ba-4465-8050-02b8d5916f46-kube-api-access-dhmnz\") pod \"image-pruner-29419200-2mqnz\" (UID: \"712511e1-14ba-4465-8050-02b8d5916f46\") " pod="openshift-image-registry/image-pruner-29419200-2mqnz" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210075 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7xchq\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210119 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/623b96d9-884a-4dea-a6a9-301f384d8666-client-ca\") pod \"route-controller-manager-6576b87f9c-sqqdd\" (UID: \"623b96d9-884a-4dea-a6a9-301f384d8666\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210145 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bbcc608-06c7-4af7-8e13-590aa487913a-service-ca-bundle\") pod \"authentication-operator-69f744f599-k98nn\" (UID: \"1bbcc608-06c7-4af7-8e13-590aa487913a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210169 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6a3eaabd-8f61-487b-83f8-e458dfa24673-audit-dir\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210192 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210218 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/88660132-2148-4282-92d7-a9f8d86b07ef-etcd-service-ca\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210241 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210264 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mfqw\" (UniqueName: \"kubernetes.io/projected/6a3eaabd-8f61-487b-83f8-e458dfa24673-kube-api-access-8mfqw\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210292 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c62xv\" (UniqueName: \"kubernetes.io/projected/c377f24c-360e-4c65-ad5d-6423e735d7a4-kube-api-access-c62xv\") pod \"downloads-7954f5f757-b2dbs\" (UID: \"c377f24c-360e-4c65-ad5d-6423e735d7a4\") " pod="openshift-console/downloads-7954f5f757-b2dbs" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210316 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbea8073-c662-4a72-871b-0abf65d79bc7-config\") pod \"machine-approver-56656f9798-rkq5r\" (UID: \"dbea8073-c662-4a72-871b-0abf65d79bc7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210336 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd1f7237-e796-4af1-b911-b15b54030e38-config\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210354 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210376 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82d71418-4a49-437a-8429-1f0569d205b0-serving-cert\") pod \"controller-manager-879f6c89f-7xchq\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210395 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210415 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88660132-2148-4282-92d7-a9f8d86b07ef-serving-cert\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210438 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64fa4ddd-3dbd-4910-b8f8-dba1bb97b963-serving-cert\") pod \"openshift-config-operator-7777fb866f-q5k5x\" (UID: \"64fa4ddd-3dbd-4910-b8f8-dba1bb97b963\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210461 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2vlx\" (UniqueName: \"kubernetes.io/projected/006160e6-b0e3-4a8f-b297-d4ec96a2e703-kube-api-access-z2vlx\") pod \"cluster-samples-operator-665b6dd947-4qqnf\" (UID: \"006160e6-b0e3-4a8f-b297-d4ec96a2e703\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4qqnf" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210484 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210505 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210530 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/d4a755dd-bcbd-4cf0-a396-673809d92250-images\") pod \"machine-api-operator-5694c8668f-dktn5\" (UID: \"d4a755dd-bcbd-4cf0-a396-673809d92250\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210540 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210552 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210574 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dc05c216-95a6-4890-9fc7-7eb70233e104-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hq629\" (UID: \"dc05c216-95a6-4890-9fc7-7eb70233e104\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq629" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210598 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vxh4\" (UniqueName: \"kubernetes.io/projected/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-kube-api-access-9vxh4\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210621 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp6tw\" (UniqueName: \"kubernetes.io/projected/154b1131-dee9-40da-8e6f-a70650023fe5-kube-api-access-sp6tw\") pod \"ingress-operator-5b745b69d9-6vpp7\" (UID: \"154b1131-dee9-40da-8e6f-a70650023fe5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210644 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-9d288\" (UID: \"230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210665 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/712511e1-14ba-4465-8050-02b8d5916f46-serviceca\") pod \"image-pruner-29419200-2mqnz\" (UID: \"712511e1-14ba-4465-8050-02b8d5916f46\") " pod="openshift-image-registry/image-pruner-29419200-2mqnz" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210689 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/dbea8073-c662-4a72-871b-0abf65d79bc7-machine-approver-tls\") pod \"machine-approver-56656f9798-rkq5r\" (UID: \"dbea8073-c662-4a72-871b-0abf65d79bc7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210712 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210749 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-serving-cert\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210773 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/64fa4ddd-3dbd-4910-b8f8-dba1bb97b963-available-featuregates\") pod \"openshift-config-operator-7777fb866f-q5k5x\" (UID: \"64fa4ddd-3dbd-4910-b8f8-dba1bb97b963\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210799 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlpw2\" (UniqueName: \"kubernetes.io/projected/dc05c216-95a6-4890-9fc7-7eb70233e104-kube-api-access-mlpw2\") pod \"marketplace-operator-79b997595-hq629\" (UID: \"dc05c216-95a6-4890-9fc7-7eb70233e104\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq629" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210824 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210849 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqvw2\" (UniqueName: \"kubernetes.io/projected/dbea8073-c662-4a72-871b-0abf65d79bc7-kube-api-access-jqvw2\") pod \"machine-approver-56656f9798-rkq5r\" (UID: \"dbea8073-c662-4a72-871b-0abf65d79bc7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210868 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cd1f7237-e796-4af1-b911-b15b54030e38-etcd-client\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211102 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211123 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnwqm\" (UniqueName: \"kubernetes.io/projected/64fa4ddd-3dbd-4910-b8f8-dba1bb97b963-kube-api-access-fnwqm\") pod \"openshift-config-operator-7777fb866f-q5k5x\" (UID: \"64fa4ddd-3dbd-4910-b8f8-dba1bb97b963\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 
00:09:49.211142 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/cd1f7237-e796-4af1-b911-b15b54030e38-image-import-ca\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211164 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/dbea8073-c662-4a72-871b-0abf65d79bc7-auth-proxy-config\") pod \"machine-approver-56656f9798-rkq5r\" (UID: \"dbea8073-c662-4a72-871b-0abf65d79bc7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211183 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/cd1f7237-e796-4af1-b911-b15b54030e38-etcd-serving-ca\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211202 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/cd1f7237-e796-4af1-b911-b15b54030e38-encryption-config\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211231 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-etcd-client\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211254 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/88660132-2148-4282-92d7-a9f8d86b07ef-etcd-client\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211270 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/623b96d9-884a-4dea-a6a9-301f384d8666-client-ca\") pod \"route-controller-manager-6576b87f9c-sqqdd\" (UID: \"623b96d9-884a-4dea-a6a9-301f384d8666\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211275 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/cd1f7237-e796-4af1-b911-b15b54030e38-audit\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211330 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2dnz\" (UniqueName: \"kubernetes.io/projected/623b96d9-884a-4dea-a6a9-301f384d8666-kube-api-access-v2dnz\") pod \"route-controller-manager-6576b87f9c-sqqdd\" (UID: \"623b96d9-884a-4dea-a6a9-301f384d8666\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211354 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/006160e6-b0e3-4a8f-b297-d4ec96a2e703-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4qqnf\" (UID: \"006160e6-b0e3-4a8f-b297-d4ec96a2e703\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4qqnf" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211376 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/623b96d9-884a-4dea-a6a9-301f384d8666-config\") pod \"route-controller-manager-6576b87f9c-sqqdd\" (UID: \"623b96d9-884a-4dea-a6a9-301f384d8666\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211398 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88660132-2148-4282-92d7-a9f8d86b07ef-config\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211415 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqb8j\" (UniqueName: \"kubernetes.io/projected/9683111b-558c-4f07-9908-2caab08063f2-kube-api-access-xqb8j\") pod \"dns-operator-744455d44c-kp656\" (UID: \"9683111b-558c-4f07-9908-2caab08063f2\") " pod="openshift-dns-operator/dns-operator-744455d44c-kp656" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211435 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/cd1f7237-e796-4af1-b911-b15b54030e38-node-pullsecrets\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211454 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd1f7237-e796-4af1-b911-b15b54030e38-serving-cert\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211471 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-config\") pod \"controller-manager-879f6c89f-7xchq\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211487 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cd1f7237-e796-4af1-b911-b15b54030e38-audit-dir\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211506 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9683111b-558c-4f07-9908-2caab08063f2-metrics-tls\") pod \"dns-operator-744455d44c-kp656\" (UID: \"9683111b-558c-4f07-9908-2caab08063f2\") " pod="openshift-dns-operator/dns-operator-744455d44c-kp656" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211530 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-9d288\" (UID: \"230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211547 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/88660132-2148-4282-92d7-a9f8d86b07ef-etcd-ca\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211547 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bbcc608-06c7-4af7-8e13-590aa487913a-service-ca-bundle\") pod \"authentication-operator-69f744f599-k98nn\" (UID: \"1bbcc608-06c7-4af7-8e13-590aa487913a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211563 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/154b1131-dee9-40da-8e6f-a70650023fe5-metrics-tls\") pod \"ingress-operator-5b745b69d9-6vpp7\" (UID: \"154b1131-dee9-40da-8e6f-a70650023fe5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211586 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7xchq\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211604 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211624 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/623b96d9-884a-4dea-a6a9-301f384d8666-serving-cert\") pod \"route-controller-manager-6576b87f9c-sqqdd\" (UID: \"623b96d9-884a-4dea-a6a9-301f384d8666\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211663 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nx6jb\" (UniqueName: 
\"kubernetes.io/projected/82d71418-4a49-437a-8429-1f0569d205b0-kube-api-access-nx6jb\") pod \"controller-manager-879f6c89f-7xchq\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211678 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkr94\" (UniqueName: \"kubernetes.io/projected/cd1f7237-e796-4af1-b911-b15b54030e38-kube-api-access-vkr94\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211696 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzvcr\" (UniqueName: \"kubernetes.io/projected/1947b883-d3db-45c3-951b-4025e2517403-kube-api-access-dzvcr\") pod \"openshift-apiserver-operator-796bbdcf4f-6s6x7\" (UID: \"1947b883-d3db-45c3-951b-4025e2517403\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211716 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztht8\" (UniqueName: \"kubernetes.io/projected/88660132-2148-4282-92d7-a9f8d86b07ef-kube-api-access-ztht8\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211732 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6nvz\" (UniqueName: \"kubernetes.io/projected/230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1-kube-api-access-m6nvz\") pod \"cluster-image-registry-operator-dc59b4c8b-9d288\" (UID: \"230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211752 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-client-ca\") pod \"controller-manager-879f6c89f-7xchq\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211768 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-9d288\" (UID: \"230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211801 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211818 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-encryption-config\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211834 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/d4a755dd-bcbd-4cf0-a396-673809d92250-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dktn5\" (UID: \"d4a755dd-bcbd-4cf0-a396-673809d92250\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211851 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211866 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/154b1131-dee9-40da-8e6f-a70650023fe5-trusted-ca\") pod \"ingress-operator-5b745b69d9-6vpp7\" (UID: \"154b1131-dee9-40da-8e6f-a70650023fe5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211881 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-audit-policies\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211895 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-audit-dir\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211985 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bbcc608-06c7-4af7-8e13-590aa487913a-serving-cert\") pod \"authentication-operator-69f744f599-k98nn\" (UID: \"1bbcc608-06c7-4af7-8e13-590aa487913a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.212005 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1947b883-d3db-45c3-951b-4025e2517403-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6s6x7\" (UID: \"1947b883-d3db-45c3-951b-4025e2517403\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.212021 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1947b883-d3db-45c3-951b-4025e2517403-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6s6x7\" (UID: \"1947b883-d3db-45c3-951b-4025e2517403\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.212036 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-audit-policies\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.212051 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/154b1131-dee9-40da-8e6f-a70650023fe5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-6vpp7\" (UID: \"154b1131-dee9-40da-8e6f-a70650023fe5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.212101 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bbcc608-06c7-4af7-8e13-590aa487913a-config\") pod \"authentication-operator-69f744f599-k98nn\" (UID: \"1bbcc608-06c7-4af7-8e13-590aa487913a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.212117 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cd1f7237-e796-4af1-b911-b15b54030e38-trusted-ca-bundle\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.212136 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/dc05c216-95a6-4890-9fc7-7eb70233e104-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hq629\" (UID: \"dc05c216-95a6-4890-9fc7-7eb70233e104\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq629" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.214322 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bbcc608-06c7-4af7-8e13-590aa487913a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-k98nn\" (UID: \"1bbcc608-06c7-4af7-8e13-590aa487913a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.214363 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjgxb\" (UniqueName: \"kubernetes.io/projected/1bbcc608-06c7-4af7-8e13-590aa487913a-kube-api-access-tjgxb\") pod \"authentication-operator-69f744f599-k98nn\" (UID: \"1bbcc608-06c7-4af7-8e13-590aa487913a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.214394 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4a755dd-bcbd-4cf0-a396-673809d92250-config\") pod \"machine-api-operator-5694c8668f-dktn5\" (UID: \"d4a755dd-bcbd-4cf0-a396-673809d92250\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 
00:09:49.214599 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.215477 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.215596 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4a755dd-bcbd-4cf0-a396-673809d92250-config\") pod \"machine-api-operator-5694c8668f-dktn5\" (UID: \"d4a755dd-bcbd-4cf0-a396-673809d92250\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.215835 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.216045 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-config\") pod \"controller-manager-879f6c89f-7xchq\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.214316 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/cd1f7237-e796-4af1-b911-b15b54030e38-node-pullsecrets\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.216047 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/623b96d9-884a-4dea-a6a9-301f384d8666-config\") pod \"route-controller-manager-6576b87f9c-sqqdd\" (UID: \"623b96d9-884a-4dea-a6a9-301f384d8666\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.216144 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cd1f7237-e796-4af1-b911-b15b54030e38-audit-dir\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.216218 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-audit-dir\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.217084 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/d4a755dd-bcbd-4cf0-a396-673809d92250-images\") pod \"machine-api-operator-5694c8668f-dktn5\" (UID: \"d4a755dd-bcbd-4cf0-a396-673809d92250\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.217175 4745 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.210266 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6a3eaabd-8f61-487b-83f8-e458dfa24673-audit-dir\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.217297 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.211906 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/cd1f7237-e796-4af1-b911-b15b54030e38-audit\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.218130 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.218656 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.220865 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/623b96d9-884a-4dea-a6a9-301f384d8666-serving-cert\") pod \"route-controller-manager-6576b87f9c-sqqdd\" (UID: \"623b96d9-884a-4dea-a6a9-301f384d8666\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.222730 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-lc28j"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.223850 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/d4a755dd-bcbd-4cf0-a396-673809d92250-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dktn5\" (UID: \"d4a755dd-bcbd-4cf0-a396-673809d92250\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.224576 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.224691 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lc28j" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.226580 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.226772 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-kp656"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.228317 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.230002 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.231395 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.232005 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-audit-policies\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.232089 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64fa4ddd-3dbd-4910-b8f8-dba1bb97b963-serving-cert\") pod \"openshift-config-operator-7777fb866f-q5k5x\" (UID: \"64fa4ddd-3dbd-4910-b8f8-dba1bb97b963\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.232569 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbea8073-c662-4a72-871b-0abf65d79bc7-config\") pod \"machine-approver-56656f9798-rkq5r\" (UID: \"dbea8073-c662-4a72-871b-0abf65d79bc7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.232594 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-serving-cert\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.232734 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" 
(UniqueName: \"kubernetes.io/secret/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-encryption-config\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.232874 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-client-ca\") pod \"controller-manager-879f6c89f-7xchq\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.233029 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/cd1f7237-e796-4af1-b911-b15b54030e38-etcd-serving-ca\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.233056 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82d71418-4a49-437a-8429-1f0569d205b0-serving-cert\") pod \"controller-manager-879f6c89f-7xchq\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.233194 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd1f7237-e796-4af1-b911-b15b54030e38-serving-cert\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.233364 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.233438 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd1f7237-e796-4af1-b911-b15b54030e38-config\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.233644 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.233721 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.233774 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.233959 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/dbea8073-c662-4a72-871b-0abf65d79bc7-auth-proxy-config\") pod \"machine-approver-56656f9798-rkq5r\" (UID: \"dbea8073-c662-4a72-871b-0abf65d79bc7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.235137 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.235415 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/64fa4ddd-3dbd-4910-b8f8-dba1bb97b963-available-featuregates\") pod \"openshift-config-operator-7777fb866f-q5k5x\" (UID: \"64fa4ddd-3dbd-4910-b8f8-dba1bb97b963\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.236083 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1947b883-d3db-45c3-951b-4025e2517403-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6s6x7\" (UID: \"1947b883-d3db-45c3-951b-4025e2517403\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.236216 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bbcc608-06c7-4af7-8e13-590aa487913a-config\") pod \"authentication-operator-69f744f599-k98nn\" (UID: \"1bbcc608-06c7-4af7-8e13-590aa487913a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.236580 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/dbea8073-c662-4a72-871b-0abf65d79bc7-machine-approver-tls\") pod \"machine-approver-56656f9798-rkq5r\" (UID: \"dbea8073-c662-4a72-871b-0abf65d79bc7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.236667 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-etcd-client\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.236682 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-audit-policies\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.236847 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-cvr94"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.236857 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.236949 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bbcc608-06c7-4af7-8e13-590aa487913a-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-k98nn\" (UID: \"1bbcc608-06c7-4af7-8e13-590aa487913a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.237437 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.238364 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/cd1f7237-e796-4af1-b911-b15b54030e38-encryption-config\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.238749 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.238774 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bbcc608-06c7-4af7-8e13-590aa487913a-serving-cert\") pod \"authentication-operator-69f744f599-k98nn\" (UID: \"1bbcc608-06c7-4af7-8e13-590aa487913a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.238785 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cd1f7237-e796-4af1-b911-b15b54030e38-trusted-ca-bundle\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.239173 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cd1f7237-e796-4af1-b911-b15b54030e38-etcd-client\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.239225 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.239531 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1947b883-d3db-45c3-951b-4025e2517403-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6s6x7\" (UID: \"1947b883-d3db-45c3-951b-4025e2517403\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.239782 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.242371 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-7h4jt"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.242960 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.243060 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-7h4jt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.243164 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/cd1f7237-e796-4af1-b911-b15b54030e38-image-import-ca\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.243573 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.244646 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.245758 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l6mg7"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.247090 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-wm7m5"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.247144 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.248962 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-wszxv"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.250152 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-sb889"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.251104 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.251334 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.252621 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-qq7r4"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.253054 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-qq7r4" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.254107 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-7h4jt"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.255288 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-lc28j"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.256437 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.258093 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.260371 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-sb889"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.261533 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.263049 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-qq7r4"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.264060 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-lwc9x"] Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.264563 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-lwc9x" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.267814 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.287414 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.308259 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.314947 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-9d288\" (UID: \"230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.314987 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/154b1131-dee9-40da-8e6f-a70650023fe5-trusted-ca\") pod \"ingress-operator-5b745b69d9-6vpp7\" (UID: \"154b1131-dee9-40da-8e6f-a70650023fe5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.315015 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/154b1131-dee9-40da-8e6f-a70650023fe5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-6vpp7\" (UID: \"154b1131-dee9-40da-8e6f-a70650023fe5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.315039 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/dc05c216-95a6-4890-9fc7-7eb70233e104-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hq629\" (UID: \"dc05c216-95a6-4890-9fc7-7eb70233e104\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq629" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.315117 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhmnz\" (UniqueName: \"kubernetes.io/projected/712511e1-14ba-4465-8050-02b8d5916f46-kube-api-access-dhmnz\") pod \"image-pruner-29419200-2mqnz\" (UID: \"712511e1-14ba-4465-8050-02b8d5916f46\") " pod="openshift-image-registry/image-pruner-29419200-2mqnz" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.315150 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/88660132-2148-4282-92d7-a9f8d86b07ef-etcd-service-ca\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.315215 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c62xv\" (UniqueName: \"kubernetes.io/projected/c377f24c-360e-4c65-ad5d-6423e735d7a4-kube-api-access-c62xv\") pod \"downloads-7954f5f757-b2dbs\" (UID: \"c377f24c-360e-4c65-ad5d-6423e735d7a4\") " 
pod="openshift-console/downloads-7954f5f757-b2dbs" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.315350 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88660132-2148-4282-92d7-a9f8d86b07ef-serving-cert\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.315370 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2vlx\" (UniqueName: \"kubernetes.io/projected/006160e6-b0e3-4a8f-b297-d4ec96a2e703-kube-api-access-z2vlx\") pod \"cluster-samples-operator-665b6dd947-4qqnf\" (UID: \"006160e6-b0e3-4a8f-b297-d4ec96a2e703\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4qqnf" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.315708 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dc05c216-95a6-4890-9fc7-7eb70233e104-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hq629\" (UID: \"dc05c216-95a6-4890-9fc7-7eb70233e104\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq629" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.315739 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp6tw\" (UniqueName: \"kubernetes.io/projected/154b1131-dee9-40da-8e6f-a70650023fe5-kube-api-access-sp6tw\") pod \"ingress-operator-5b745b69d9-6vpp7\" (UID: \"154b1131-dee9-40da-8e6f-a70650023fe5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.315754 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-9d288\" (UID: \"230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.315790 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/712511e1-14ba-4465-8050-02b8d5916f46-serviceca\") pod \"image-pruner-29419200-2mqnz\" (UID: \"712511e1-14ba-4465-8050-02b8d5916f46\") " pod="openshift-image-registry/image-pruner-29419200-2mqnz" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.315823 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlpw2\" (UniqueName: \"kubernetes.io/projected/dc05c216-95a6-4890-9fc7-7eb70233e104-kube-api-access-mlpw2\") pod \"marketplace-operator-79b997595-hq629\" (UID: \"dc05c216-95a6-4890-9fc7-7eb70233e104\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq629" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.315879 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/88660132-2148-4282-92d7-a9f8d86b07ef-etcd-client\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.315787 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/88660132-2148-4282-92d7-a9f8d86b07ef-etcd-service-ca\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.315911 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/006160e6-b0e3-4a8f-b297-d4ec96a2e703-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4qqnf\" (UID: \"006160e6-b0e3-4a8f-b297-d4ec96a2e703\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4qqnf" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.315996 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88660132-2148-4282-92d7-a9f8d86b07ef-config\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.316031 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqb8j\" (UniqueName: \"kubernetes.io/projected/9683111b-558c-4f07-9908-2caab08063f2-kube-api-access-xqb8j\") pod \"dns-operator-744455d44c-kp656\" (UID: \"9683111b-558c-4f07-9908-2caab08063f2\") " pod="openshift-dns-operator/dns-operator-744455d44c-kp656" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.316066 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9683111b-558c-4f07-9908-2caab08063f2-metrics-tls\") pod \"dns-operator-744455d44c-kp656\" (UID: \"9683111b-558c-4f07-9908-2caab08063f2\") " pod="openshift-dns-operator/dns-operator-744455d44c-kp656" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.316091 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-9d288\" (UID: \"230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.316119 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/88660132-2148-4282-92d7-a9f8d86b07ef-etcd-ca\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.316141 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/154b1131-dee9-40da-8e6f-a70650023fe5-metrics-tls\") pod \"ingress-operator-5b745b69d9-6vpp7\" (UID: \"154b1131-dee9-40da-8e6f-a70650023fe5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.316205 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztht8\" (UniqueName: \"kubernetes.io/projected/88660132-2148-4282-92d7-a9f8d86b07ef-kube-api-access-ztht8\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.316230 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6nvz\" (UniqueName: \"kubernetes.io/projected/230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1-kube-api-access-m6nvz\") pod \"cluster-image-registry-operator-dc59b4c8b-9d288\" (UID: \"230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.316753 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-9d288\" (UID: \"230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.316750 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88660132-2148-4282-92d7-a9f8d86b07ef-config\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.316945 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/712511e1-14ba-4465-8050-02b8d5916f46-serviceca\") pod \"image-pruner-29419200-2mqnz\" (UID: \"712511e1-14ba-4465-8050-02b8d5916f46\") " pod="openshift-image-registry/image-pruner-29419200-2mqnz" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.317099 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/88660132-2148-4282-92d7-a9f8d86b07ef-etcd-ca\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.318683 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/88660132-2148-4282-92d7-a9f8d86b07ef-etcd-client\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.319581 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/006160e6-b0e3-4a8f-b297-d4ec96a2e703-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4qqnf\" (UID: \"006160e6-b0e3-4a8f-b297-d4ec96a2e703\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4qqnf" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.319604 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-9d288\" (UID: \"230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.319974 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/88660132-2148-4282-92d7-a9f8d86b07ef-serving-cert\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.327751 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.348645 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.367718 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.388054 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.407517 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.428543 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.448202 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.467612 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.480093 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/154b1131-dee9-40da-8e6f-a70650023fe5-metrics-tls\") pod \"ingress-operator-5b745b69d9-6vpp7\" (UID: \"154b1131-dee9-40da-8e6f-a70650023fe5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.495837 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.509652 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/154b1131-dee9-40da-8e6f-a70650023fe5-trusted-ca\") pod \"ingress-operator-5b745b69d9-6vpp7\" (UID: \"154b1131-dee9-40da-8e6f-a70650023fe5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.511050 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.548400 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.568704 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.579585 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/dc05c216-95a6-4890-9fc7-7eb70233e104-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hq629\" (UID: \"dc05c216-95a6-4890-9fc7-7eb70233e104\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq629" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.589253 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.616455 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.618422 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dc05c216-95a6-4890-9fc7-7eb70233e104-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hq629\" (UID: \"dc05c216-95a6-4890-9fc7-7eb70233e104\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq629" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.628672 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.648316 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.668221 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.681724 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9683111b-558c-4f07-9908-2caab08063f2-metrics-tls\") pod \"dns-operator-744455d44c-kp656\" (UID: \"9683111b-558c-4f07-9908-2caab08063f2\") " pod="openshift-dns-operator/dns-operator-744455d44c-kp656" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.688814 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.708104 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.747888 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.768163 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.788781 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.808954 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.828919 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.847883 4745 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.867843 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.889328 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.908285 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.929408 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.949438 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.968195 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 08 00:09:49 crc kubenswrapper[4745]: I1208 00:09:49.988406 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.008353 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.028308 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.048798 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.068397 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.088247 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.108214 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.128383 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.148436 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.168128 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.186642 4745 request.go:700] Waited for 1.01154174s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-multus/secrets?fieldSelector=metadata.name%3Dmultus-ac-dockercfg-9lkdf&limit=500&resourceVersion=0 Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.189095 4745 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.209085 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.231319 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.248689 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.268600 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.288286 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.308422 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.328941 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.348767 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.368025 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.388181 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.408150 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.430615 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.448517 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.468087 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.488636 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.509125 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.529346 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.549132 4745 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.568598 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.588884 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.608249 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.648947 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.652783 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsc9q\" (UniqueName: \"kubernetes.io/projected/d4a755dd-bcbd-4cf0-a396-673809d92250-kube-api-access-qsc9q\") pod \"machine-api-operator-5694c8668f-dktn5\" (UID: \"d4a755dd-bcbd-4cf0-a396-673809d92250\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.668623 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.707660 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2dnz\" (UniqueName: \"kubernetes.io/projected/623b96d9-884a-4dea-a6a9-301f384d8666-kube-api-access-v2dnz\") pod \"route-controller-manager-6576b87f9c-sqqdd\" (UID: \"623b96d9-884a-4dea-a6a9-301f384d8666\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.709363 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.729025 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.751759 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.785797 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkr94\" (UniqueName: \"kubernetes.io/projected/cd1f7237-e796-4af1-b911-b15b54030e38-kube-api-access-vkr94\") pod \"apiserver-76f77b778f-lx4dj\" (UID: \"cd1f7237-e796-4af1-b911-b15b54030e38\") " pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.803909 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nx6jb\" (UniqueName: \"kubernetes.io/projected/82d71418-4a49-437a-8429-1f0569d205b0-kube-api-access-nx6jb\") pod \"controller-manager-879f6c89f-7xchq\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.820291 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.823490 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzvcr\" (UniqueName: \"kubernetes.io/projected/1947b883-d3db-45c3-951b-4025e2517403-kube-api-access-dzvcr\") pod \"openshift-apiserver-operator-796bbdcf4f-6s6x7\" (UID: \"1947b883-d3db-45c3-951b-4025e2517403\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.837757 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.845137 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vxh4\" (UniqueName: \"kubernetes.io/projected/d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe-kube-api-access-9vxh4\") pod \"apiserver-7bbb656c7d-2nbvg\" (UID: \"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.854587 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.867724 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.872525 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.888917 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.908037 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.924222 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.934458 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.947660 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.955953 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.971361 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqvw2\" (UniqueName: \"kubernetes.io/projected/dbea8073-c662-4a72-871b-0abf65d79bc7-kube-api-access-jqvw2\") pod \"machine-approver-56656f9798-rkq5r\" (UID: \"dbea8073-c662-4a72-871b-0abf65d79bc7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" Dec 08 00:09:50 crc kubenswrapper[4745]: I1208 00:09:50.985165 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnwqm\" (UniqueName: \"kubernetes.io/projected/64fa4ddd-3dbd-4910-b8f8-dba1bb97b963-kube-api-access-fnwqm\") pod \"openshift-config-operator-7777fb866f-q5k5x\" (UID: \"64fa4ddd-3dbd-4910-b8f8-dba1bb97b963\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.004040 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjgxb\" (UniqueName: \"kubernetes.io/projected/1bbcc608-06c7-4af7-8e13-590aa487913a-kube-api-access-tjgxb\") pod \"authentication-operator-69f744f599-k98nn\" (UID: \"1bbcc608-06c7-4af7-8e13-590aa487913a\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.028681 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.029158 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mfqw\" (UniqueName: \"kubernetes.io/projected/6a3eaabd-8f61-487b-83f8-e458dfa24673-kube-api-access-8mfqw\") pod \"oauth-openshift-558db77b4-b9kth\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.050403 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.068205 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.087689 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.110990 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.127711 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.148948 4745 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.167739 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.188260 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.197168 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.209000 4745 request.go:700] Waited for 1.955733728s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-canary/secrets?fieldSelector=metadata.name%3Dcanary-serving-cert&limit=500&resourceVersion=0 Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.213282 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.215262 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg"] Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.227652 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.247667 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.262767 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.268149 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.272570 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.278592 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.287646 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.300804 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7xchq"] Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.301889 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-lx4dj"] Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.310496 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 08 00:09:51 crc kubenswrapper[4745]: W1208 00:09:51.318590 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcd1f7237_e796_4af1_b911_b15b54030e38.slice/crio-10d509ee369d5710317fd8a13018d09dc59a67d99f339a6c173dc8d5b0e3582f WatchSource:0}: Error finding container 10d509ee369d5710317fd8a13018d09dc59a67d99f339a6c173dc8d5b0e3582f: Status 404 returned error can't find the container with id 10d509ee369d5710317fd8a13018d09dc59a67d99f339a6c173dc8d5b0e3582f Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.325832 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dktn5"] Dec 08 00:09:51 crc kubenswrapper[4745]: W1208 00:09:51.326402 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod82d71418_4a49_437a_8429_1f0569d205b0.slice/crio-1a840b2fa2d6d7184ddcf31ceb99131abfe8022d3c454b3a4e1c8da720bd496c WatchSource:0}: Error finding container 1a840b2fa2d6d7184ddcf31ceb99131abfe8022d3c454b3a4e1c8da720bd496c: Status 404 returned error can't find the container with id 1a840b2fa2d6d7184ddcf31ceb99131abfe8022d3c454b3a4e1c8da720bd496c Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.357632 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-9d288\" (UID: \"230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" Dec 08 00:09:51 crc kubenswrapper[4745]: W1208 00:09:51.371124 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4a755dd_bcbd_4cf0_a396_673809d92250.slice/crio-2f0ad42e8afbae86fbc0673143c25d09fc8fb1ddd0dad630081c5233b597c2fc WatchSource:0}: Error finding container 2f0ad42e8afbae86fbc0673143c25d09fc8fb1ddd0dad630081c5233b597c2fc: Status 404 returned error can't find the container with id 2f0ad42e8afbae86fbc0673143c25d09fc8fb1ddd0dad630081c5233b597c2fc Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.378060 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7"] Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.380762 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd"] Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.386491 4745 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/154b1131-dee9-40da-8e6f-a70650023fe5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-6vpp7\" (UID: \"154b1131-dee9-40da-8e6f-a70650023fe5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.389400 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhmnz\" (UniqueName: \"kubernetes.io/projected/712511e1-14ba-4465-8050-02b8d5916f46-kube-api-access-dhmnz\") pod \"image-pruner-29419200-2mqnz\" (UID: \"712511e1-14ba-4465-8050-02b8d5916f46\") " pod="openshift-image-registry/image-pruner-29419200-2mqnz" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.401516 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c62xv\" (UniqueName: \"kubernetes.io/projected/c377f24c-360e-4c65-ad5d-6423e735d7a4-kube-api-access-c62xv\") pod \"downloads-7954f5f757-b2dbs\" (UID: \"c377f24c-360e-4c65-ad5d-6423e735d7a4\") " pod="openshift-console/downloads-7954f5f757-b2dbs" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.434496 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2vlx\" (UniqueName: \"kubernetes.io/projected/006160e6-b0e3-4a8f-b297-d4ec96a2e703-kube-api-access-z2vlx\") pod \"cluster-samples-operator-665b6dd947-4qqnf\" (UID: \"006160e6-b0e3-4a8f-b297-d4ec96a2e703\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4qqnf" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.448026 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-b9kth"] Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.449134 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlpw2\" (UniqueName: \"kubernetes.io/projected/dc05c216-95a6-4890-9fc7-7eb70233e104-kube-api-access-mlpw2\") pod \"marketplace-operator-79b997595-hq629\" (UID: \"dc05c216-95a6-4890-9fc7-7eb70233e104\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq629" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.470005 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp6tw\" (UniqueName: \"kubernetes.io/projected/154b1131-dee9-40da-8e6f-a70650023fe5-kube-api-access-sp6tw\") pod \"ingress-operator-5b745b69d9-6vpp7\" (UID: \"154b1131-dee9-40da-8e6f-a70650023fe5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.485720 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqb8j\" (UniqueName: \"kubernetes.io/projected/9683111b-558c-4f07-9908-2caab08063f2-kube-api-access-xqb8j\") pod \"dns-operator-744455d44c-kp656\" (UID: \"9683111b-558c-4f07-9908-2caab08063f2\") " pod="openshift-dns-operator/dns-operator-744455d44c-kp656" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.505324 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztht8\" (UniqueName: \"kubernetes.io/projected/88660132-2148-4282-92d7-a9f8d86b07ef-kube-api-access-ztht8\") pod \"etcd-operator-b45778765-wm7m5\" (UID: \"88660132-2148-4282-92d7-a9f8d86b07ef\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.539270 4745 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-m6nvz\" (UniqueName: \"kubernetes.io/projected/230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1-kube-api-access-m6nvz\") pod \"cluster-image-registry-operator-dc59b4c8b-9d288\" (UID: \"230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.578192 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-k98nn"] Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.594670 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29419200-2mqnz" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.600543 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x"] Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.608383 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.648662 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/1c9c3543-96ff-4a6f-9499-95bd43aa7368-registry-certificates\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.648707 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1c9c3543-96ff-4a6f-9499-95bd43aa7368-trusted-ca\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.648725 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82qq5\" (UniqueName: \"kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-kube-api-access-82qq5\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.648740 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a704627f-7539-4aec-ba1a-344a957ab7bf-service-ca\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.648758 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a704627f-7539-4aec-ba1a-344a957ab7bf-oauth-serving-cert\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.648771 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2d515a0-c5ed-4407-8e58-a75c8c485fe3-config\") pod 
\"console-operator-58897d9998-j77jf\" (UID: \"b2d515a0-c5ed-4407-8e58-a75c8c485fe3\") " pod="openshift-console-operator/console-operator-58897d9998-j77jf" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.648801 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a704627f-7539-4aec-ba1a-344a957ab7bf-console-serving-cert\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.648816 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b2d515a0-c5ed-4407-8e58-a75c8c485fe3-trusted-ca\") pod \"console-operator-58897d9998-j77jf\" (UID: \"b2d515a0-c5ed-4407-8e58-a75c8c485fe3\") " pod="openshift-console-operator/console-operator-58897d9998-j77jf" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.648843 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1c9c3543-96ff-4a6f-9499-95bd43aa7368-ca-trust-extracted\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.648856 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf2f7e37-36bf-4e20-91b9-7f63c53ec998-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-6z5q4\" (UID: \"cf2f7e37-36bf-4e20-91b9-7f63c53ec998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.648871 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a704627f-7539-4aec-ba1a-344a957ab7bf-trusted-ca-bundle\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.648885 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a704627f-7539-4aec-ba1a-344a957ab7bf-console-config\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.648908 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1c9c3543-96ff-4a6f-9499-95bd43aa7368-installation-pull-secrets\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.648941 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-bound-sa-token\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.648968 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a704627f-7539-4aec-ba1a-344a957ab7bf-console-oauth-config\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.648988 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sglpg\" (UniqueName: \"kubernetes.io/projected/b2d515a0-c5ed-4407-8e58-a75c8c485fe3-kube-api-access-sglpg\") pod \"console-operator-58897d9998-j77jf\" (UID: \"b2d515a0-c5ed-4407-8e58-a75c8c485fe3\") " pod="openshift-console-operator/console-operator-58897d9998-j77jf" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.649027 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.649050 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf2f7e37-36bf-4e20-91b9-7f63c53ec998-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-6z5q4\" (UID: \"cf2f7e37-36bf-4e20-91b9-7f63c53ec998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.649071 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2d515a0-c5ed-4407-8e58-a75c8c485fe3-serving-cert\") pod \"console-operator-58897d9998-j77jf\" (UID: \"b2d515a0-c5ed-4407-8e58-a75c8c485fe3\") " pod="openshift-console-operator/console-operator-58897d9998-j77jf" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.649131 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg25n\" (UniqueName: \"kubernetes.io/projected/cf2f7e37-36bf-4e20-91b9-7f63c53ec998-kube-api-access-kg25n\") pod \"openshift-controller-manager-operator-756b6f6bc6-6z5q4\" (UID: \"cf2f7e37-36bf-4e20-91b9-7f63c53ec998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.649150 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-registry-tls\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.649165 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxm6x\" (UniqueName: \"kubernetes.io/projected/a704627f-7539-4aec-ba1a-344a957ab7bf-kube-api-access-wxm6x\") pod \"console-f9d7485db-8cg7l\" (UID: 
\"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: E1208 00:09:51.650742 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:52.150728183 +0000 UTC m=+147.579934483 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:51 crc kubenswrapper[4745]: W1208 00:09:51.660435 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1bbcc608_06c7_4af7_8e13_590aa487913a.slice/crio-f12319f48320f5abaaf77ca2e16a0d60056283d29c08022bc73c13b39274c11e WatchSource:0}: Error finding container f12319f48320f5abaaf77ca2e16a0d60056283d29c08022bc73c13b39274c11e: Status 404 returned error can't find the container with id f12319f48320f5abaaf77ca2e16a0d60056283d29c08022bc73c13b39274c11e Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.671600 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4qqnf" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.678773 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-b2dbs" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.694645 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.706898 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.717806 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hq629" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.735815 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-kp656" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.750355 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:51 crc kubenswrapper[4745]: E1208 00:09:51.750462 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:52.250438896 +0000 UTC m=+147.679645196 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.750585 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d685ac29-3c32-478c-ba21-34b3bedc547b-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8kc2b\" (UID: \"d685ac29-3c32-478c-ba21-34b3bedc547b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.750648 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31884019-0917-4fda-a319-2d896944a6b8-serving-cert\") pod \"service-ca-operator-777779d784-lc28j\" (UID: \"31884019-0917-4fda-a319-2d896944a6b8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lc28j" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.750670 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3cf962b7-91c3-4e33-b2b4-ca2d1e26f089-srv-cert\") pod \"catalog-operator-68c6474976-76wjb\" (UID: \"3cf962b7-91c3-4e33-b2b4-ca2d1e26f089\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.750718 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/a15e5593-aef3-43e1-894e-51a109c501a7-default-certificate\") pod \"router-default-5444994796-qtsfq\" (UID: \"a15e5593-aef3-43e1-894e-51a109c501a7\") " pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.750741 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/011ba434-d4cf-479a-8732-d8621edc4fcf-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dfqd7\" (UID: \"011ba434-d4cf-479a-8732-d8621edc4fcf\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.750765 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h698n\" (UniqueName: \"kubernetes.io/projected/ca4fb91d-3cfc-47c9-9e47-a4aae006b200-kube-api-access-h698n\") pod \"dns-default-cvr94\" (UID: \"ca4fb91d-3cfc-47c9-9e47-a4aae006b200\") " pod="openshift-dns/dns-default-cvr94" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.750785 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647-signing-key\") pod \"service-ca-9c57cc56f-7h4jt\" (UID: \"ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647\") " pod="openshift-service-ca/service-ca-9c57cc56f-7h4jt" Dec 08 00:09:51 crc 
kubenswrapper[4745]: I1208 00:09:51.750827 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a704627f-7539-4aec-ba1a-344a957ab7bf-service-ca\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.750853 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zcch\" (UniqueName: \"kubernetes.io/projected/615970c1-cf68-4be8-b528-37937fa778ab-kube-api-access-6zcch\") pod \"kube-storage-version-migrator-operator-b67b599dd-v72t9\" (UID: \"615970c1-cf68-4be8-b528-37937fa778ab\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.750879 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2d515a0-c5ed-4407-8e58-a75c8c485fe3-config\") pod \"console-operator-58897d9998-j77jf\" (UID: \"b2d515a0-c5ed-4407-8e58-a75c8c485fe3\") " pod="openshift-console-operator/console-operator-58897d9998-j77jf" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.750899 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6z6p\" (UniqueName: \"kubernetes.io/projected/1e54638b-8ec2-49df-8c72-25dceedafbd0-kube-api-access-w6z6p\") pod \"machine-config-server-lwc9x\" (UID: \"1e54638b-8ec2-49df-8c72-25dceedafbd0\") " pod="openshift-machine-config-operator/machine-config-server-lwc9x" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.750986 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtzn7\" (UniqueName: \"kubernetes.io/projected/011ba434-d4cf-479a-8732-d8621edc4fcf-kube-api-access-rtzn7\") pod \"package-server-manager-789f6589d5-dfqd7\" (UID: \"011ba434-d4cf-479a-8732-d8621edc4fcf\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751013 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/37a32702-19b8-4fe3-8eda-52e5a39db569-mountpoint-dir\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751041 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6z9q\" (UniqueName: \"kubernetes.io/projected/cb60ee68-795e-44a7-896f-cc0ab8963417-kube-api-access-t6z9q\") pod \"machine-config-controller-84d6567774-qvcwp\" (UID: \"cb60ee68-795e-44a7-896f-cc0ab8963417\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751078 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf2f7e37-36bf-4e20-91b9-7f63c53ec998-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-6z5q4\" (UID: \"cf2f7e37-36bf-4e20-91b9-7f63c53ec998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4" Dec 08 00:09:51 crc 
kubenswrapper[4745]: I1208 00:09:51.751113 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/615970c1-cf68-4be8-b528-37937fa778ab-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-v72t9\" (UID: \"615970c1-cf68-4be8-b528-37937fa778ab\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751134 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a15e5593-aef3-43e1-894e-51a109c501a7-metrics-certs\") pod \"router-default-5444994796-qtsfq\" (UID: \"a15e5593-aef3-43e1-894e-51a109c501a7\") " pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751182 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ac18e1eb-73d7-42e2-910b-3c86e12ef5e6-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x6b49\" (UID: \"ac18e1eb-73d7-42e2-910b-3c86e12ef5e6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751209 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa019532-0002-464b-9965-2804d83126a8-config-volume\") pod \"collect-profiles-29419200-dnd2g\" (UID: \"fa019532-0002-464b-9965-2804d83126a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751230 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cb60ee68-795e-44a7-896f-cc0ab8963417-proxy-tls\") pod \"machine-config-controller-84d6567774-qvcwp\" (UID: \"cb60ee68-795e-44a7-896f-cc0ab8963417\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751255 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf2f7e37-36bf-4e20-91b9-7f63c53ec998-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-6z5q4\" (UID: \"cf2f7e37-36bf-4e20-91b9-7f63c53ec998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751277 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/49482394-a214-4307-95c3-1d75283d2ff4-images\") pod \"machine-config-operator-74547568cd-cz8h4\" (UID: \"49482394-a214-4307-95c3-1d75283d2ff4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751297 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d685ac29-3c32-478c-ba21-34b3bedc547b-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8kc2b\" (UID: \"d685ac29-3c32-478c-ba21-34b3bedc547b\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751340 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a15e5593-aef3-43e1-894e-51a109c501a7-service-ca-bundle\") pod \"router-default-5444994796-qtsfq\" (UID: \"a15e5593-aef3-43e1-894e-51a109c501a7\") " pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751366 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxm6x\" (UniqueName: \"kubernetes.io/projected/a704627f-7539-4aec-ba1a-344a957ab7bf-kube-api-access-wxm6x\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751407 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtn2s\" (UniqueName: \"kubernetes.io/projected/4710a169-2ce7-48ed-b70f-d637966bbcfd-kube-api-access-xtn2s\") pod \"multus-admission-controller-857f4d67dd-qzc8t\" (UID: \"4710a169-2ce7-48ed-b70f-d637966bbcfd\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qzc8t" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751436 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ca4fb91d-3cfc-47c9-9e47-a4aae006b200-metrics-tls\") pod \"dns-default-cvr94\" (UID: \"ca4fb91d-3cfc-47c9-9e47-a4aae006b200\") " pod="openshift-dns/dns-default-cvr94" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751459 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1e54638b-8ec2-49df-8c72-25dceedafbd0-certs\") pod \"machine-config-server-lwc9x\" (UID: \"1e54638b-8ec2-49df-8c72-25dceedafbd0\") " pod="openshift-machine-config-operator/machine-config-server-lwc9x" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751514 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cb60ee68-795e-44a7-896f-cc0ab8963417-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-qvcwp\" (UID: \"cb60ee68-795e-44a7-896f-cc0ab8963417\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751538 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zlxp\" (UniqueName: \"kubernetes.io/projected/31884019-0917-4fda-a319-2d896944a6b8-kube-api-access-9zlxp\") pod \"service-ca-operator-777779d784-lc28j\" (UID: \"31884019-0917-4fda-a319-2d896944a6b8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lc28j" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751599 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/356675b0-56a2-4687-9158-922a52484fc1-webhook-cert\") pod \"packageserver-d55dfcdfc-r5nn4\" (UID: \"356675b0-56a2-4687-9158-922a52484fc1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" Dec 08 
00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751624 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4710a169-2ce7-48ed-b70f-d637966bbcfd-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qzc8t\" (UID: \"4710a169-2ce7-48ed-b70f-d637966bbcfd\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qzc8t" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751646 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvzq8\" (UniqueName: \"kubernetes.io/projected/ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647-kube-api-access-vvzq8\") pod \"service-ca-9c57cc56f-7h4jt\" (UID: \"ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647\") " pod="openshift-service-ca/service-ca-9c57cc56f-7h4jt" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751681 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/37a32702-19b8-4fe3-8eda-52e5a39db569-plugins-dir\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751717 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1c9c3543-96ff-4a6f-9499-95bd43aa7368-trusted-ca\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751743 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82qq5\" (UniqueName: \"kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-kube-api-access-82qq5\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751759 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1e54638b-8ec2-49df-8c72-25dceedafbd0-node-bootstrap-token\") pod \"machine-config-server-lwc9x\" (UID: \"1e54638b-8ec2-49df-8c72-25dceedafbd0\") " pod="openshift-machine-config-operator/machine-config-server-lwc9x" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751775 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a704627f-7539-4aec-ba1a-344a957ab7bf-oauth-serving-cert\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751792 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bhjz\" (UniqueName: \"kubernetes.io/projected/37a32702-19b8-4fe3-8eda-52e5a39db569-kube-api-access-6bhjz\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751806 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" 
(UniqueName: \"kubernetes.io/secret/3cf962b7-91c3-4e33-b2b4-ca2d1e26f089-profile-collector-cert\") pod \"catalog-operator-68c6474976-76wjb\" (UID: \"3cf962b7-91c3-4e33-b2b4-ca2d1e26f089\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751821 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6k2kc\" (UniqueName: \"kubernetes.io/projected/8151bcec-d2f6-4c68-bbda-c0f31a47f5ed-kube-api-access-6k2kc\") pod \"ingress-canary-qq7r4\" (UID: \"8151bcec-d2f6-4c68-bbda-c0f31a47f5ed\") " pod="openshift-ingress-canary/ingress-canary-qq7r4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751853 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a704627f-7539-4aec-ba1a-344a957ab7bf-console-serving-cert\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751868 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b2d515a0-c5ed-4407-8e58-a75c8c485fe3-trusted-ca\") pod \"console-operator-58897d9998-j77jf\" (UID: \"b2d515a0-c5ed-4407-8e58-a75c8c485fe3\") " pod="openshift-console-operator/console-operator-58897d9998-j77jf" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751883 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dc9qk\" (UniqueName: \"kubernetes.io/projected/fa019532-0002-464b-9965-2804d83126a8-kube-api-access-dc9qk\") pod \"collect-profiles-29419200-dnd2g\" (UID: \"fa019532-0002-464b-9965-2804d83126a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751908 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/49482394-a214-4307-95c3-1d75283d2ff4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-cz8h4\" (UID: \"49482394-a214-4307-95c3-1d75283d2ff4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751942 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e-srv-cert\") pod \"olm-operator-6b444d44fb-hzb8b\" (UID: \"b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751963 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac18e1eb-73d7-42e2-910b-3c86e12ef5e6-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x6b49\" (UID: \"ac18e1eb-73d7-42e2-910b-3c86e12ef5e6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751983 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/ca4fb91d-3cfc-47c9-9e47-a4aae006b200-config-volume\") pod \"dns-default-cvr94\" (UID: \"ca4fb91d-3cfc-47c9-9e47-a4aae006b200\") " pod="openshift-dns/dns-default-cvr94" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.751998 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdgmq\" (UniqueName: \"kubernetes.io/projected/39b1b9ce-45b4-45ff-b6d5-e4deedf30e19-kube-api-access-gdgmq\") pod \"migrator-59844c95c7-wszxv\" (UID: \"39b1b9ce-45b4-45ff-b6d5-e4deedf30e19\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wszxv" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752013 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpg62\" (UniqueName: \"kubernetes.io/projected/a15e5593-aef3-43e1-894e-51a109c501a7-kube-api-access-hpg62\") pod \"router-default-5444994796-qtsfq\" (UID: \"a15e5593-aef3-43e1-894e-51a109c501a7\") " pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752027 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/356675b0-56a2-4687-9158-922a52484fc1-tmpfs\") pod \"packageserver-d55dfcdfc-r5nn4\" (UID: \"356675b0-56a2-4687-9158-922a52484fc1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752044 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1c9c3543-96ff-4a6f-9499-95bd43aa7368-ca-trust-extracted\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752059 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a15e5593-aef3-43e1-894e-51a109c501a7-stats-auth\") pod \"router-default-5444994796-qtsfq\" (UID: \"a15e5593-aef3-43e1-894e-51a109c501a7\") " pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752074 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxdqs\" (UniqueName: \"kubernetes.io/projected/356675b0-56a2-4687-9158-922a52484fc1-kube-api-access-fxdqs\") pod \"packageserver-d55dfcdfc-r5nn4\" (UID: \"356675b0-56a2-4687-9158-922a52484fc1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752093 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a704627f-7539-4aec-ba1a-344a957ab7bf-trusted-ca-bundle\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752115 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmvl9\" (UniqueName: \"kubernetes.io/projected/b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e-kube-api-access-rmvl9\") pod \"olm-operator-6b444d44fb-hzb8b\" (UID: \"b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752135 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa019532-0002-464b-9965-2804d83126a8-secret-volume\") pod \"collect-profiles-29419200-dnd2g\" (UID: \"fa019532-0002-464b-9965-2804d83126a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752152 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/27dbf6ed-7b15-4ddd-84b0-83fa2f178c63-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-nsh9z\" (UID: \"27dbf6ed-7b15-4ddd-84b0-83fa2f178c63\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nsh9z" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752179 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a704627f-7539-4aec-ba1a-344a957ab7bf-console-config\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752194 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/615970c1-cf68-4be8-b528-37937fa778ab-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-v72t9\" (UID: \"615970c1-cf68-4be8-b528-37937fa778ab\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752219 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1c9c3543-96ff-4a6f-9499-95bd43aa7368-installation-pull-secrets\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752236 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-bound-sa-token\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752253 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h96ks\" (UniqueName: \"kubernetes.io/projected/3cf962b7-91c3-4e33-b2b4-ca2d1e26f089-kube-api-access-h96ks\") pod \"catalog-operator-68c6474976-76wjb\" (UID: \"3cf962b7-91c3-4e33-b2b4-ca2d1e26f089\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752282 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f18f9fd-0f3e-4262-9d3f-f657288b0e73-config\") pod \"kube-apiserver-operator-766d6c64bb-fxkjx\" (UID: \"9f18f9fd-0f3e-4262-9d3f-f657288b0e73\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752298 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647-signing-cabundle\") pod \"service-ca-9c57cc56f-7h4jt\" (UID: \"ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647\") " pod="openshift-service-ca/service-ca-9c57cc56f-7h4jt" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752318 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a704627f-7539-4aec-ba1a-344a957ab7bf-console-oauth-config\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752342 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sglpg\" (UniqueName: \"kubernetes.io/projected/b2d515a0-c5ed-4407-8e58-a75c8c485fe3-kube-api-access-sglpg\") pod \"console-operator-58897d9998-j77jf\" (UID: \"b2d515a0-c5ed-4407-8e58-a75c8c485fe3\") " pod="openshift-console-operator/console-operator-58897d9998-j77jf" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752366 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752382 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4hln\" (UniqueName: \"kubernetes.io/projected/49482394-a214-4307-95c3-1d75283d2ff4-kube-api-access-p4hln\") pod \"machine-config-operator-74547568cd-cz8h4\" (UID: \"49482394-a214-4307-95c3-1d75283d2ff4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752401 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jf4q\" (UniqueName: \"kubernetes.io/projected/27dbf6ed-7b15-4ddd-84b0-83fa2f178c63-kube-api-access-7jf4q\") pod \"control-plane-machine-set-operator-78cbb6b69f-nsh9z\" (UID: \"27dbf6ed-7b15-4ddd-84b0-83fa2f178c63\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nsh9z" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752422 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9f18f9fd-0f3e-4262-9d3f-f657288b0e73-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-fxkjx\" (UID: \"9f18f9fd-0f3e-4262-9d3f-f657288b0e73\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752445 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2d515a0-c5ed-4407-8e58-a75c8c485fe3-serving-cert\") pod \"console-operator-58897d9998-j77jf\" (UID: \"b2d515a0-c5ed-4407-8e58-a75c8c485fe3\") " 
pod="openshift-console-operator/console-operator-58897d9998-j77jf" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752460 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hzb8b\" (UID: \"b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752478 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/356675b0-56a2-4687-9158-922a52484fc1-apiservice-cert\") pod \"packageserver-d55dfcdfc-r5nn4\" (UID: \"356675b0-56a2-4687-9158-922a52484fc1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752499 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac18e1eb-73d7-42e2-910b-3c86e12ef5e6-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x6b49\" (UID: \"ac18e1eb-73d7-42e2-910b-3c86e12ef5e6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752558 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/37a32702-19b8-4fe3-8eda-52e5a39db569-socket-dir\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752585 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg25n\" (UniqueName: \"kubernetes.io/projected/cf2f7e37-36bf-4e20-91b9-7f63c53ec998-kube-api-access-kg25n\") pod \"openshift-controller-manager-operator-756b6f6bc6-6z5q4\" (UID: \"cf2f7e37-36bf-4e20-91b9-7f63c53ec998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752606 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/49482394-a214-4307-95c3-1d75283d2ff4-proxy-tls\") pod \"machine-config-operator-74547568cd-cz8h4\" (UID: \"49482394-a214-4307-95c3-1d75283d2ff4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752626 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/37a32702-19b8-4fe3-8eda-52e5a39db569-csi-data-dir\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752660 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-registry-tls\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752682 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31884019-0917-4fda-a319-2d896944a6b8-config\") pod \"service-ca-operator-777779d784-lc28j\" (UID: \"31884019-0917-4fda-a319-2d896944a6b8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lc28j" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752729 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8151bcec-d2f6-4c68-bbda-c0f31a47f5ed-cert\") pod \"ingress-canary-qq7r4\" (UID: \"8151bcec-d2f6-4c68-bbda-c0f31a47f5ed\") " pod="openshift-ingress-canary/ingress-canary-qq7r4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752764 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d685ac29-3c32-478c-ba21-34b3bedc547b-config\") pod \"kube-controller-manager-operator-78b949d7b-8kc2b\" (UID: \"d685ac29-3c32-478c-ba21-34b3bedc547b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752786 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/37a32702-19b8-4fe3-8eda-52e5a39db569-registration-dir\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752810 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/1c9c3543-96ff-4a6f-9499-95bd43aa7368-registry-certificates\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.752833 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f18f9fd-0f3e-4262-9d3f-f657288b0e73-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-fxkjx\" (UID: \"9f18f9fd-0f3e-4262-9d3f-f657288b0e73\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.755457 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf2f7e37-36bf-4e20-91b9-7f63c53ec998-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-6z5q4\" (UID: \"cf2f7e37-36bf-4e20-91b9-7f63c53ec998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.756795 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a704627f-7539-4aec-ba1a-344a957ab7bf-service-ca\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.756797 4745 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2d515a0-c5ed-4407-8e58-a75c8c485fe3-config\") pod \"console-operator-58897d9998-j77jf\" (UID: \"b2d515a0-c5ed-4407-8e58-a75c8c485fe3\") " pod="openshift-console-operator/console-operator-58897d9998-j77jf" Dec 08 00:09:51 crc kubenswrapper[4745]: E1208 00:09:51.757972 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:52.257918195 +0000 UTC m=+147.687124495 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.758852 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a704627f-7539-4aec-ba1a-344a957ab7bf-oauth-serving-cert\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.760039 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b2d515a0-c5ed-4407-8e58-a75c8c485fe3-trusted-ca\") pod \"console-operator-58897d9998-j77jf\" (UID: \"b2d515a0-c5ed-4407-8e58-a75c8c485fe3\") " pod="openshift-console-operator/console-operator-58897d9998-j77jf" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.763196 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1c9c3543-96ff-4a6f-9499-95bd43aa7368-ca-trust-extracted\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.763271 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1c9c3543-96ff-4a6f-9499-95bd43aa7368-trusted-ca\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.765692 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a704627f-7539-4aec-ba1a-344a957ab7bf-console-config\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.773838 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf2f7e37-36bf-4e20-91b9-7f63c53ec998-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-6z5q4\" (UID: \"cf2f7e37-36bf-4e20-91b9-7f63c53ec998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4" Dec 08 00:09:51 
crc kubenswrapper[4745]: I1208 00:09:51.774438 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a704627f-7539-4aec-ba1a-344a957ab7bf-console-serving-cert\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.777053 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2d515a0-c5ed-4407-8e58-a75c8c485fe3-serving-cert\") pod \"console-operator-58897d9998-j77jf\" (UID: \"b2d515a0-c5ed-4407-8e58-a75c8c485fe3\") " pod="openshift-console-operator/console-operator-58897d9998-j77jf" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.787697 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a704627f-7539-4aec-ba1a-344a957ab7bf-console-oauth-config\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.791492 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/1c9c3543-96ff-4a6f-9499-95bd43aa7368-registry-certificates\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.793380 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1c9c3543-96ff-4a6f-9499-95bd43aa7368-installation-pull-secrets\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.793917 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a704627f-7539-4aec-ba1a-344a957ab7bf-trusted-ca-bundle\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.794446 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-registry-tls\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.815235 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxm6x\" (UniqueName: \"kubernetes.io/projected/a704627f-7539-4aec-ba1a-344a957ab7bf-kube-api-access-wxm6x\") pod \"console-f9d7485db-8cg7l\" (UID: \"a704627f-7539-4aec-ba1a-344a957ab7bf\") " pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.824540 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82qq5\" (UniqueName: \"kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-kube-api-access-82qq5\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.842896 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-bound-sa-token\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.844484 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg25n\" (UniqueName: \"kubernetes.io/projected/cf2f7e37-36bf-4e20-91b9-7f63c53ec998-kube-api-access-kg25n\") pod \"openshift-controller-manager-operator-756b6f6bc6-6z5q4\" (UID: \"cf2f7e37-36bf-4e20-91b9-7f63c53ec998\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.854691 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.854952 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cb60ee68-795e-44a7-896f-cc0ab8963417-proxy-tls\") pod \"machine-config-controller-84d6567774-qvcwp\" (UID: \"cb60ee68-795e-44a7-896f-cc0ab8963417\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.854980 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa019532-0002-464b-9965-2804d83126a8-config-volume\") pod \"collect-profiles-29419200-dnd2g\" (UID: \"fa019532-0002-464b-9965-2804d83126a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855005 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/49482394-a214-4307-95c3-1d75283d2ff4-images\") pod \"machine-config-operator-74547568cd-cz8h4\" (UID: \"49482394-a214-4307-95c3-1d75283d2ff4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855039 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d685ac29-3c32-478c-ba21-34b3bedc547b-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8kc2b\" (UID: \"d685ac29-3c32-478c-ba21-34b3bedc547b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855062 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a15e5593-aef3-43e1-894e-51a109c501a7-service-ca-bundle\") pod \"router-default-5444994796-qtsfq\" (UID: \"a15e5593-aef3-43e1-894e-51a109c501a7\") " pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855087 4745 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtn2s\" (UniqueName: \"kubernetes.io/projected/4710a169-2ce7-48ed-b70f-d637966bbcfd-kube-api-access-xtn2s\") pod \"multus-admission-controller-857f4d67dd-qzc8t\" (UID: \"4710a169-2ce7-48ed-b70f-d637966bbcfd\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qzc8t" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855114 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855135 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ca4fb91d-3cfc-47c9-9e47-a4aae006b200-metrics-tls\") pod \"dns-default-cvr94\" (UID: \"ca4fb91d-3cfc-47c9-9e47-a4aae006b200\") " pod="openshift-dns/dns-default-cvr94" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855156 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zlxp\" (UniqueName: \"kubernetes.io/projected/31884019-0917-4fda-a319-2d896944a6b8-kube-api-access-9zlxp\") pod \"service-ca-operator-777779d784-lc28j\" (UID: \"31884019-0917-4fda-a319-2d896944a6b8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lc28j" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855179 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1e54638b-8ec2-49df-8c72-25dceedafbd0-certs\") pod \"machine-config-server-lwc9x\" (UID: \"1e54638b-8ec2-49df-8c72-25dceedafbd0\") " pod="openshift-machine-config-operator/machine-config-server-lwc9x" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855202 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cb60ee68-795e-44a7-896f-cc0ab8963417-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-qvcwp\" (UID: \"cb60ee68-795e-44a7-896f-cc0ab8963417\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855228 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/356675b0-56a2-4687-9158-922a52484fc1-webhook-cert\") pod \"packageserver-d55dfcdfc-r5nn4\" (UID: \"356675b0-56a2-4687-9158-922a52484fc1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855251 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4710a169-2ce7-48ed-b70f-d637966bbcfd-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qzc8t\" (UID: \"4710a169-2ce7-48ed-b70f-d637966bbcfd\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qzc8t" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855272 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvzq8\" (UniqueName: 
\"kubernetes.io/projected/ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647-kube-api-access-vvzq8\") pod \"service-ca-9c57cc56f-7h4jt\" (UID: \"ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647\") " pod="openshift-service-ca/service-ca-9c57cc56f-7h4jt" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855317 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/37a32702-19b8-4fe3-8eda-52e5a39db569-plugins-dir\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855343 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1e54638b-8ec2-49df-8c72-25dceedafbd0-node-bootstrap-token\") pod \"machine-config-server-lwc9x\" (UID: \"1e54638b-8ec2-49df-8c72-25dceedafbd0\") " pod="openshift-machine-config-operator/machine-config-server-lwc9x" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855368 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bhjz\" (UniqueName: \"kubernetes.io/projected/37a32702-19b8-4fe3-8eda-52e5a39db569-kube-api-access-6bhjz\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855389 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3cf962b7-91c3-4e33-b2b4-ca2d1e26f089-profile-collector-cert\") pod \"catalog-operator-68c6474976-76wjb\" (UID: \"3cf962b7-91c3-4e33-b2b4-ca2d1e26f089\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.855413 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6k2kc\" (UniqueName: \"kubernetes.io/projected/8151bcec-d2f6-4c68-bbda-c0f31a47f5ed-kube-api-access-6k2kc\") pod \"ingress-canary-qq7r4\" (UID: \"8151bcec-d2f6-4c68-bbda-c0f31a47f5ed\") " pod="openshift-ingress-canary/ingress-canary-qq7r4" Dec 08 00:09:51 crc kubenswrapper[4745]: E1208 00:09:51.857867 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:52.357845664 +0000 UTC m=+147.787051964 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.858822 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa019532-0002-464b-9965-2804d83126a8-config-volume\") pod \"collect-profiles-29419200-dnd2g\" (UID: \"fa019532-0002-464b-9965-2804d83126a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.858823 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a15e5593-aef3-43e1-894e-51a109c501a7-service-ca-bundle\") pod \"router-default-5444994796-qtsfq\" (UID: \"a15e5593-aef3-43e1-894e-51a109c501a7\") " pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.859299 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/49482394-a214-4307-95c3-1d75283d2ff4-images\") pod \"machine-config-operator-74547568cd-cz8h4\" (UID: \"49482394-a214-4307-95c3-1d75283d2ff4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862235 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/49482394-a214-4307-95c3-1d75283d2ff4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-cz8h4\" (UID: \"49482394-a214-4307-95c3-1d75283d2ff4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862266 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e-srv-cert\") pod \"olm-operator-6b444d44fb-hzb8b\" (UID: \"b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862293 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dc9qk\" (UniqueName: \"kubernetes.io/projected/fa019532-0002-464b-9965-2804d83126a8-kube-api-access-dc9qk\") pod \"collect-profiles-29419200-dnd2g\" (UID: \"fa019532-0002-464b-9965-2804d83126a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862363 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac18e1eb-73d7-42e2-910b-3c86e12ef5e6-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x6b49\" (UID: \"ac18e1eb-73d7-42e2-910b-3c86e12ef5e6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862383 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-volume\" (UniqueName: \"kubernetes.io/configmap/ca4fb91d-3cfc-47c9-9e47-a4aae006b200-config-volume\") pod \"dns-default-cvr94\" (UID: \"ca4fb91d-3cfc-47c9-9e47-a4aae006b200\") " pod="openshift-dns/dns-default-cvr94" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862398 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdgmq\" (UniqueName: \"kubernetes.io/projected/39b1b9ce-45b4-45ff-b6d5-e4deedf30e19-kube-api-access-gdgmq\") pod \"migrator-59844c95c7-wszxv\" (UID: \"39b1b9ce-45b4-45ff-b6d5-e4deedf30e19\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wszxv" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862419 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpg62\" (UniqueName: \"kubernetes.io/projected/a15e5593-aef3-43e1-894e-51a109c501a7-kube-api-access-hpg62\") pod \"router-default-5444994796-qtsfq\" (UID: \"a15e5593-aef3-43e1-894e-51a109c501a7\") " pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862435 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/356675b0-56a2-4687-9158-922a52484fc1-tmpfs\") pod \"packageserver-d55dfcdfc-r5nn4\" (UID: \"356675b0-56a2-4687-9158-922a52484fc1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862455 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a15e5593-aef3-43e1-894e-51a109c501a7-stats-auth\") pod \"router-default-5444994796-qtsfq\" (UID: \"a15e5593-aef3-43e1-894e-51a109c501a7\") " pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862471 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmvl9\" (UniqueName: \"kubernetes.io/projected/b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e-kube-api-access-rmvl9\") pod \"olm-operator-6b444d44fb-hzb8b\" (UID: \"b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862500 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa019532-0002-464b-9965-2804d83126a8-secret-volume\") pod \"collect-profiles-29419200-dnd2g\" (UID: \"fa019532-0002-464b-9965-2804d83126a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862519 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/27dbf6ed-7b15-4ddd-84b0-83fa2f178c63-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-nsh9z\" (UID: \"27dbf6ed-7b15-4ddd-84b0-83fa2f178c63\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nsh9z" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862536 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxdqs\" (UniqueName: \"kubernetes.io/projected/356675b0-56a2-4687-9158-922a52484fc1-kube-api-access-fxdqs\") pod \"packageserver-d55dfcdfc-r5nn4\" (UID: 
\"356675b0-56a2-4687-9158-922a52484fc1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862766 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/615970c1-cf68-4be8-b528-37937fa778ab-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-v72t9\" (UID: \"615970c1-cf68-4be8-b528-37937fa778ab\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862806 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h96ks\" (UniqueName: \"kubernetes.io/projected/3cf962b7-91c3-4e33-b2b4-ca2d1e26f089-kube-api-access-h96ks\") pod \"catalog-operator-68c6474976-76wjb\" (UID: \"3cf962b7-91c3-4e33-b2b4-ca2d1e26f089\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862825 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f18f9fd-0f3e-4262-9d3f-f657288b0e73-config\") pod \"kube-apiserver-operator-766d6c64bb-fxkjx\" (UID: \"9f18f9fd-0f3e-4262-9d3f-f657288b0e73\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862845 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647-signing-cabundle\") pod \"service-ca-9c57cc56f-7h4jt\" (UID: \"ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647\") " pod="openshift-service-ca/service-ca-9c57cc56f-7h4jt" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862872 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jf4q\" (UniqueName: \"kubernetes.io/projected/27dbf6ed-7b15-4ddd-84b0-83fa2f178c63-kube-api-access-7jf4q\") pod \"control-plane-machine-set-operator-78cbb6b69f-nsh9z\" (UID: \"27dbf6ed-7b15-4ddd-84b0-83fa2f178c63\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nsh9z" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862887 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9f18f9fd-0f3e-4262-9d3f-f657288b0e73-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-fxkjx\" (UID: \"9f18f9fd-0f3e-4262-9d3f-f657288b0e73\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862908 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862939 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4hln\" (UniqueName: \"kubernetes.io/projected/49482394-a214-4307-95c3-1d75283d2ff4-kube-api-access-p4hln\") pod \"machine-config-operator-74547568cd-cz8h4\" (UID: 
\"49482394-a214-4307-95c3-1d75283d2ff4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862955 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/356675b0-56a2-4687-9158-922a52484fc1-apiservice-cert\") pod \"packageserver-d55dfcdfc-r5nn4\" (UID: \"356675b0-56a2-4687-9158-922a52484fc1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862974 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hzb8b\" (UID: \"b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.862990 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac18e1eb-73d7-42e2-910b-3c86e12ef5e6-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x6b49\" (UID: \"ac18e1eb-73d7-42e2-910b-3c86e12ef5e6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863015 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/37a32702-19b8-4fe3-8eda-52e5a39db569-socket-dir\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863030 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/37a32702-19b8-4fe3-8eda-52e5a39db569-csi-data-dir\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863045 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/49482394-a214-4307-95c3-1d75283d2ff4-proxy-tls\") pod \"machine-config-operator-74547568cd-cz8h4\" (UID: \"49482394-a214-4307-95c3-1d75283d2ff4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863061 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31884019-0917-4fda-a319-2d896944a6b8-config\") pod \"service-ca-operator-777779d784-lc28j\" (UID: \"31884019-0917-4fda-a319-2d896944a6b8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lc28j" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863085 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d685ac29-3c32-478c-ba21-34b3bedc547b-config\") pod \"kube-controller-manager-operator-78b949d7b-8kc2b\" (UID: \"d685ac29-3c32-478c-ba21-34b3bedc547b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863099 4745 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/37a32702-19b8-4fe3-8eda-52e5a39db569-registration-dir\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863113 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8151bcec-d2f6-4c68-bbda-c0f31a47f5ed-cert\") pod \"ingress-canary-qq7r4\" (UID: \"8151bcec-d2f6-4c68-bbda-c0f31a47f5ed\") " pod="openshift-ingress-canary/ingress-canary-qq7r4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863139 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f18f9fd-0f3e-4262-9d3f-f657288b0e73-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-fxkjx\" (UID: \"9f18f9fd-0f3e-4262-9d3f-f657288b0e73\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863155 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d685ac29-3c32-478c-ba21-34b3bedc547b-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8kc2b\" (UID: \"d685ac29-3c32-478c-ba21-34b3bedc547b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863171 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31884019-0917-4fda-a319-2d896944a6b8-serving-cert\") pod \"service-ca-operator-777779d784-lc28j\" (UID: \"31884019-0917-4fda-a319-2d896944a6b8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lc28j" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863186 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/a15e5593-aef3-43e1-894e-51a109c501a7-default-certificate\") pod \"router-default-5444994796-qtsfq\" (UID: \"a15e5593-aef3-43e1-894e-51a109c501a7\") " pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863201 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/011ba434-d4cf-479a-8732-d8621edc4fcf-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dfqd7\" (UID: \"011ba434-d4cf-479a-8732-d8621edc4fcf\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863219 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h698n\" (UniqueName: \"kubernetes.io/projected/ca4fb91d-3cfc-47c9-9e47-a4aae006b200-kube-api-access-h698n\") pod \"dns-default-cvr94\" (UID: \"ca4fb91d-3cfc-47c9-9e47-a4aae006b200\") " pod="openshift-dns/dns-default-cvr94" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863233 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3cf962b7-91c3-4e33-b2b4-ca2d1e26f089-srv-cert\") pod 
\"catalog-operator-68c6474976-76wjb\" (UID: \"3cf962b7-91c3-4e33-b2b4-ca2d1e26f089\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863251 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647-signing-key\") pod \"service-ca-9c57cc56f-7h4jt\" (UID: \"ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647\") " pod="openshift-service-ca/service-ca-9c57cc56f-7h4jt" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863285 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863303 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zcch\" (UniqueName: \"kubernetes.io/projected/615970c1-cf68-4be8-b528-37937fa778ab-kube-api-access-6zcch\") pod \"kube-storage-version-migrator-operator-b67b599dd-v72t9\" (UID: \"615970c1-cf68-4be8-b528-37937fa778ab\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863321 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6z6p\" (UniqueName: \"kubernetes.io/projected/1e54638b-8ec2-49df-8c72-25dceedafbd0-kube-api-access-w6z6p\") pod \"machine-config-server-lwc9x\" (UID: \"1e54638b-8ec2-49df-8c72-25dceedafbd0\") " pod="openshift-machine-config-operator/machine-config-server-lwc9x" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863344 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtzn7\" (UniqueName: \"kubernetes.io/projected/011ba434-d4cf-479a-8732-d8621edc4fcf-kube-api-access-rtzn7\") pod \"package-server-manager-789f6589d5-dfqd7\" (UID: \"011ba434-d4cf-479a-8732-d8621edc4fcf\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863361 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/37a32702-19b8-4fe3-8eda-52e5a39db569-mountpoint-dir\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863376 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6z9q\" (UniqueName: \"kubernetes.io/projected/cb60ee68-795e-44a7-896f-cc0ab8963417-kube-api-access-t6z9q\") pod \"machine-config-controller-84d6567774-qvcwp\" (UID: \"cb60ee68-795e-44a7-896f-cc0ab8963417\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863393 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/615970c1-cf68-4be8-b528-37937fa778ab-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-v72t9\" (UID: 
\"615970c1-cf68-4be8-b528-37937fa778ab\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863408 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a15e5593-aef3-43e1-894e-51a109c501a7-metrics-certs\") pod \"router-default-5444994796-qtsfq\" (UID: \"a15e5593-aef3-43e1-894e-51a109c501a7\") " pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863437 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ac18e1eb-73d7-42e2-910b-3c86e12ef5e6-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x6b49\" (UID: \"ac18e1eb-73d7-42e2-910b-3c86e12ef5e6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.863772 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d685ac29-3c32-478c-ba21-34b3bedc547b-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8kc2b\" (UID: \"d685ac29-3c32-478c-ba21-34b3bedc547b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.864216 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/37a32702-19b8-4fe3-8eda-52e5a39db569-socket-dir\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.865700 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/49482394-a214-4307-95c3-1d75283d2ff4-auth-proxy-config\") pod \"machine-config-operator-74547568cd-cz8h4\" (UID: \"49482394-a214-4307-95c3-1d75283d2ff4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.866051 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d685ac29-3c32-478c-ba21-34b3bedc547b-config\") pod \"kube-controller-manager-operator-78b949d7b-8kc2b\" (UID: \"d685ac29-3c32-478c-ba21-34b3bedc547b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.866231 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/37a32702-19b8-4fe3-8eda-52e5a39db569-csi-data-dir\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.866771 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cb60ee68-795e-44a7-896f-cc0ab8963417-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-qvcwp\" (UID: \"cb60ee68-795e-44a7-896f-cc0ab8963417\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp" Dec 08 00:09:51 crc 
kubenswrapper[4745]: I1208 00:09:51.863405 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.868295 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/37a32702-19b8-4fe3-8eda-52e5a39db569-plugins-dir\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.868594 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cb60ee68-795e-44a7-896f-cc0ab8963417-proxy-tls\") pod \"machine-config-controller-84d6567774-qvcwp\" (UID: \"cb60ee68-795e-44a7-896f-cc0ab8963417\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.869011 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f18f9fd-0f3e-4262-9d3f-f657288b0e73-config\") pod \"kube-apiserver-operator-766d6c64bb-fxkjx\" (UID: \"9f18f9fd-0f3e-4262-9d3f-f657288b0e73\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.871138 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/615970c1-cf68-4be8-b528-37937fa778ab-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-v72t9\" (UID: \"615970c1-cf68-4be8-b528-37937fa778ab\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.871529 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647-signing-cabundle\") pod \"service-ca-9c57cc56f-7h4jt\" (UID: \"ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647\") " pod="openshift-service-ca/service-ca-9c57cc56f-7h4jt" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.872037 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/356675b0-56a2-4687-9158-922a52484fc1-tmpfs\") pod \"packageserver-d55dfcdfc-r5nn4\" (UID: \"356675b0-56a2-4687-9158-922a52484fc1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.872607 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/356675b0-56a2-4687-9158-922a52484fc1-webhook-cert\") pod \"packageserver-d55dfcdfc-r5nn4\" (UID: \"356675b0-56a2-4687-9158-922a52484fc1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.872654 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac18e1eb-73d7-42e2-910b-3c86e12ef5e6-config\") pod 
\"openshift-kube-scheduler-operator-5fdd9b5758-x6b49\" (UID: \"ac18e1eb-73d7-42e2-910b-3c86e12ef5e6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.873784 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ca4fb91d-3cfc-47c9-9e47-a4aae006b200-config-volume\") pod \"dns-default-cvr94\" (UID: \"ca4fb91d-3cfc-47c9-9e47-a4aae006b200\") " pod="openshift-dns/dns-default-cvr94" Dec 08 00:09:51 crc kubenswrapper[4745]: E1208 00:09:51.873868 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:52.373853744 +0000 UTC m=+147.803060044 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.874507 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31884019-0917-4fda-a319-2d896944a6b8-config\") pod \"service-ca-operator-777779d784-lc28j\" (UID: \"31884019-0917-4fda-a319-2d896944a6b8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lc28j" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.875862 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/37a32702-19b8-4fe3-8eda-52e5a39db569-mountpoint-dir\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.877953 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.880768 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/49482394-a214-4307-95c3-1d75283d2ff4-proxy-tls\") pod \"machine-config-operator-74547568cd-cz8h4\" (UID: \"49482394-a214-4307-95c3-1d75283d2ff4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.882823 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/27dbf6ed-7b15-4ddd-84b0-83fa2f178c63-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-nsh9z\" (UID: \"27dbf6ed-7b15-4ddd-84b0-83fa2f178c63\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nsh9z" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.883449 4745 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/615970c1-cf68-4be8-b528-37937fa778ab-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-v72t9\" (UID: \"615970c1-cf68-4be8-b528-37937fa778ab\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.884061 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4710a169-2ce7-48ed-b70f-d637966bbcfd-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qzc8t\" (UID: \"4710a169-2ce7-48ed-b70f-d637966bbcfd\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qzc8t" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.884139 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/37a32702-19b8-4fe3-8eda-52e5a39db569-registration-dir\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.885008 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hzb8b\" (UID: \"b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.886678 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa019532-0002-464b-9965-2804d83126a8-secret-volume\") pod \"collect-profiles-29419200-dnd2g\" (UID: \"fa019532-0002-464b-9965-2804d83126a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.889545 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ca4fb91d-3cfc-47c9-9e47-a4aae006b200-metrics-tls\") pod \"dns-default-cvr94\" (UID: \"ca4fb91d-3cfc-47c9-9e47-a4aae006b200\") " pod="openshift-dns/dns-default-cvr94" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.889743 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647-signing-key\") pod \"service-ca-9c57cc56f-7h4jt\" (UID: \"ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647\") " pod="openshift-service-ca/service-ca-9c57cc56f-7h4jt" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.890855 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sglpg\" (UniqueName: \"kubernetes.io/projected/b2d515a0-c5ed-4407-8e58-a75c8c485fe3-kube-api-access-sglpg\") pod \"console-operator-58897d9998-j77jf\" (UID: \"b2d515a0-c5ed-4407-8e58-a75c8c485fe3\") " pod="openshift-console-operator/console-operator-58897d9998-j77jf" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.896393 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1e54638b-8ec2-49df-8c72-25dceedafbd0-certs\") pod \"machine-config-server-lwc9x\" (UID: \"1e54638b-8ec2-49df-8c72-25dceedafbd0\") " 
pod="openshift-machine-config-operator/machine-config-server-lwc9x" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.896518 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f18f9fd-0f3e-4262-9d3f-f657288b0e73-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-fxkjx\" (UID: \"9f18f9fd-0f3e-4262-9d3f-f657288b0e73\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.896716 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/a15e5593-aef3-43e1-894e-51a109c501a7-default-certificate\") pod \"router-default-5444994796-qtsfq\" (UID: \"a15e5593-aef3-43e1-894e-51a109c501a7\") " pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.896893 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a15e5593-aef3-43e1-894e-51a109c501a7-stats-auth\") pod \"router-default-5444994796-qtsfq\" (UID: \"a15e5593-aef3-43e1-894e-51a109c501a7\") " pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.900406 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1e54638b-8ec2-49df-8c72-25dceedafbd0-node-bootstrap-token\") pod \"machine-config-server-lwc9x\" (UID: \"1e54638b-8ec2-49df-8c72-25dceedafbd0\") " pod="openshift-machine-config-operator/machine-config-server-lwc9x" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.902431 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.902606 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e-srv-cert\") pod \"olm-operator-6b444d44fb-hzb8b\" (UID: \"b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.905018 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/011ba434-d4cf-479a-8732-d8621edc4fcf-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dfqd7\" (UID: \"011ba434-d4cf-479a-8732-d8621edc4fcf\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.905064 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a15e5593-aef3-43e1-894e-51a109c501a7-metrics-certs\") pod \"router-default-5444994796-qtsfq\" (UID: \"a15e5593-aef3-43e1-894e-51a109c501a7\") " pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.909828 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3cf962b7-91c3-4e33-b2b4-ca2d1e26f089-srv-cert\") pod \"catalog-operator-68c6474976-76wjb\" (UID: \"3cf962b7-91c3-4e33-b2b4-ca2d1e26f089\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.913573 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31884019-0917-4fda-a319-2d896944a6b8-serving-cert\") pod \"service-ca-operator-777779d784-lc28j\" (UID: \"31884019-0917-4fda-a319-2d896944a6b8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lc28j" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.915549 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zlxp\" (UniqueName: \"kubernetes.io/projected/31884019-0917-4fda-a319-2d896944a6b8-kube-api-access-9zlxp\") pod \"service-ca-operator-777779d784-lc28j\" (UID: \"31884019-0917-4fda-a319-2d896944a6b8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lc28j" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.916827 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8151bcec-d2f6-4c68-bbda-c0f31a47f5ed-cert\") pod \"ingress-canary-qq7r4\" (UID: \"8151bcec-d2f6-4c68-bbda-c0f31a47f5ed\") " pod="openshift-ingress-canary/ingress-canary-qq7r4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.917724 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac18e1eb-73d7-42e2-910b-3c86e12ef5e6-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x6b49\" (UID: \"ac18e1eb-73d7-42e2-910b-3c86e12ef5e6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.925112 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/356675b0-56a2-4687-9158-922a52484fc1-apiservice-cert\") pod \"packageserver-d55dfcdfc-r5nn4\" (UID: \"356675b0-56a2-4687-9158-922a52484fc1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.932377 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3cf962b7-91c3-4e33-b2b4-ca2d1e26f089-profile-collector-cert\") pod \"catalog-operator-68c6474976-76wjb\" (UID: \"3cf962b7-91c3-4e33-b2b4-ca2d1e26f089\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.938817 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6k2kc\" (UniqueName: \"kubernetes.io/projected/8151bcec-d2f6-4c68-bbda-c0f31a47f5ed-kube-api-access-6k2kc\") pod \"ingress-canary-qq7r4\" (UID: \"8151bcec-d2f6-4c68-bbda-c0f31a47f5ed\") " pod="openshift-ingress-canary/ingress-canary-qq7r4" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.943189 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288"] Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.951284 4745 generic.go:334] "Generic (PLEG): container finished" podID="d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe" containerID="3f7c441189e5247a2d5dd67b76b788600d449ececb182b02c2a25dce77093219" exitCode=0 Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.951604 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" event={"ID":"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe","Type":"ContainerDied","Data":"3f7c441189e5247a2d5dd67b76b788600d449ececb182b02c2a25dce77093219"} Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.951706 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" event={"ID":"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe","Type":"ContainerStarted","Data":"c981ac3c1cbc440d08bb7afc59f7289566cce05fb17f05d44c132ace12c48ceb"} Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.953359 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" event={"ID":"623b96d9-884a-4dea-a6a9-301f384d8666","Type":"ContainerStarted","Data":"49dd054dc178c4f9773ba203591976075c6c24ce3725895d163e8f7ffae686e5"} Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.953400 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" event={"ID":"623b96d9-884a-4dea-a6a9-301f384d8666","Type":"ContainerStarted","Data":"beacd63ec355550991859f6c01c643f7e14e4ac1f1fe90f6a7c433f37d45f335"} Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.954492 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.955578 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ac18e1eb-73d7-42e2-910b-3c86e12ef5e6-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-x6b49\" (UID: \"ac18e1eb-73d7-42e2-910b-3c86e12ef5e6\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49" Dec 08 
00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.956578 4745 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-sqqdd container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.956628 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" podUID="623b96d9-884a-4dea-a6a9-301f384d8666" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.963904 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:51 crc kubenswrapper[4745]: E1208 00:09:51.964258 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:52.464225281 +0000 UTC m=+147.893431581 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.964643 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.964681 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.966593 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-j77jf" Dec 08 00:09:51 crc kubenswrapper[4745]: E1208 00:09:51.966790 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:52.466773349 +0000 UTC m=+147.895979739 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.970527 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" event={"ID":"d4a755dd-bcbd-4cf0-a396-673809d92250","Type":"ContainerStarted","Data":"0a52b0847cc93404a56dc574afc49b2687d9474d3466fbf9d28663899b1018a5"} Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.970670 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" event={"ID":"d4a755dd-bcbd-4cf0-a396-673809d92250","Type":"ContainerStarted","Data":"2f0ad42e8afbae86fbc0673143c25d09fc8fb1ddd0dad630081c5233b597c2fc"} Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.970826 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvzq8\" (UniqueName: \"kubernetes.io/projected/ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647-kube-api-access-vvzq8\") pod \"service-ca-9c57cc56f-7h4jt\" (UID: \"ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647\") " pod="openshift-service-ca/service-ca-9c57cc56f-7h4jt" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.972665 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.981276 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" event={"ID":"82d71418-4a49-437a-8429-1f0569d205b0","Type":"ContainerStarted","Data":"fbbb504f347f733254f6f29984d236b9a34b1d97590023922dfe7b1203b2d0cb"} Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.981323 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" event={"ID":"82d71418-4a49-437a-8429-1f0569d205b0","Type":"ContainerStarted","Data":"1a840b2fa2d6d7184ddcf31ceb99131abfe8022d3c454b3a4e1c8da720bd496c"} Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.983042 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.987399 4745 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-7xchq container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.987468 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" podUID="82d71418-4a49-437a-8429-1f0569d205b0" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: 
connect: connection refused" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.987637 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d685ac29-3c32-478c-ba21-34b3bedc547b-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8kc2b\" (UID: \"d685ac29-3c32-478c-ba21-34b3bedc547b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b" Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.994552 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" event={"ID":"cd1f7237-e796-4af1-b911-b15b54030e38","Type":"ContainerStarted","Data":"10d509ee369d5710317fd8a13018d09dc59a67d99f339a6c173dc8d5b0e3582f"} Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.995399 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" event={"ID":"1bbcc608-06c7-4af7-8e13-590aa487913a","Type":"ContainerStarted","Data":"7c5e754d57b55914e2872b26b8409da520a7f3b1b0a2f738572208b994940f9a"} Dec 08 00:09:51 crc kubenswrapper[4745]: I1208 00:09:51.995426 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" event={"ID":"1bbcc608-06c7-4af7-8e13-590aa487913a","Type":"ContainerStarted","Data":"f12319f48320f5abaaf77ca2e16a0d60056283d29c08022bc73c13b39274c11e"} Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.001673 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.001943 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" event={"ID":"dbea8073-c662-4a72-871b-0abf65d79bc7","Type":"ContainerStarted","Data":"60fe893214a77a0873581de5af9e72e91c97921e889a3119eeed7d4a07ccdf9a"} Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.002138 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" event={"ID":"dbea8073-c662-4a72-871b-0abf65d79bc7","Type":"ContainerStarted","Data":"bd2bd80cce7b14802cb9392a2a2ecf43e7d666c2b703edb098080c3071ba583b"} Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.002876 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.004095 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7" event={"ID":"1947b883-d3db-45c3-951b-4025e2517403","Type":"ContainerStarted","Data":"e12094d6baf84229376d2bbf21faadc3159a8b263ab3fe4f8cb1b5311f01871d"} Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.004130 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7" event={"ID":"1947b883-d3db-45c3-951b-4025e2517403","Type":"ContainerStarted","Data":"331ca5f402129c759e48b8a16c6dfe25bb333c1e4166eb202f695e195caf5ce1"} Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.005068 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" event={"ID":"64fa4ddd-3dbd-4910-b8f8-dba1bb97b963","Type":"ContainerStarted","Data":"0545009e6f88e319108cf59af33a6b22c42bad80399aa733223c777ca2a4eac4"} Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.007301 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtn2s\" (UniqueName: \"kubernetes.io/projected/4710a169-2ce7-48ed-b70f-d637966bbcfd-kube-api-access-xtn2s\") pod \"multus-admission-controller-857f4d67dd-qzc8t\" (UID: \"4710a169-2ce7-48ed-b70f-d637966bbcfd\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qzc8t" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.012540 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" event={"ID":"6a3eaabd-8f61-487b-83f8-e458dfa24673","Type":"ContainerStarted","Data":"9bde71d17b77e17a96d05d22d288eea8309a5daf825947cdf5e0a6968baf17ea"} Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.012696 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" event={"ID":"6a3eaabd-8f61-487b-83f8-e458dfa24673","Type":"ContainerStarted","Data":"6ed6e806ce9423f107e21e9f420439947ec4e51d9346634c316400fd6a4589bb"} Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.031230 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h698n\" (UniqueName: \"kubernetes.io/projected/ca4fb91d-3cfc-47c9-9e47-a4aae006b200-kube-api-access-h698n\") pod \"dns-default-cvr94\" (UID: \"ca4fb91d-3cfc-47c9-9e47-a4aae006b200\") " pod="openshift-dns/dns-default-cvr94" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.044113 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxdqs\" (UniqueName: \"kubernetes.io/projected/356675b0-56a2-4687-9158-922a52484fc1-kube-api-access-fxdqs\") pod \"packageserver-d55dfcdfc-r5nn4\" (UID: \"356675b0-56a2-4687-9158-922a52484fc1\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.057447 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.066087 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.066325 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:52 crc kubenswrapper[4745]: E1208 00:09:52.067003 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:52.566972507 +0000 UTC m=+147.996178817 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.069016 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9f18f9fd-0f3e-4262-9d3f-f657288b0e73-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-fxkjx\" (UID: \"9f18f9fd-0f3e-4262-9d3f-f657288b0e73\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.069649 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.080767 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7"] Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.081069 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-qzc8t" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.087024 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpg62\" (UniqueName: \"kubernetes.io/projected/a15e5593-aef3-43e1-894e-51a109c501a7-kube-api-access-hpg62\") pod \"router-default-5444994796-qtsfq\" (UID: \"a15e5593-aef3-43e1-894e-51a109c501a7\") " pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.087288 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.108578 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdgmq\" (UniqueName: \"kubernetes.io/projected/39b1b9ce-45b4-45ff-b6d5-e4deedf30e19-kube-api-access-gdgmq\") pod \"migrator-59844c95c7-wszxv\" (UID: \"39b1b9ce-45b4-45ff-b6d5-e4deedf30e19\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wszxv" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.111522 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-cvr94" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.117800 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.126681 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h96ks\" (UniqueName: \"kubernetes.io/projected/3cf962b7-91c3-4e33-b2b4-ca2d1e26f089-kube-api-access-h96ks\") pod \"catalog-operator-68c6474976-76wjb\" (UID: \"3cf962b7-91c3-4e33-b2b4-ca2d1e26f089\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.140606 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jf4q\" (UniqueName: \"kubernetes.io/projected/27dbf6ed-7b15-4ddd-84b0-83fa2f178c63-kube-api-access-7jf4q\") pod \"control-plane-machine-set-operator-78cbb6b69f-nsh9z\" (UID: \"27dbf6ed-7b15-4ddd-84b0-83fa2f178c63\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nsh9z" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.141582 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-kp656"] Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.152864 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.154570 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-8cg7l"] Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.157217 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lc28j" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.162620 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-7h4jt" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.168191 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:52 crc kubenswrapper[4745]: E1208 00:09:52.168524 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:52.668512406 +0000 UTC m=+148.097718706 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.169769 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29419200-2mqnz"] Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.180482 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dc9qk\" (UniqueName: \"kubernetes.io/projected/fa019532-0002-464b-9965-2804d83126a8-kube-api-access-dc9qk\") pod \"collect-profiles-29419200-dnd2g\" (UID: \"fa019532-0002-464b-9965-2804d83126a8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.192463 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-b2dbs"] Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.193258 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-qq7r4" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.194052 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4qqnf"] Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.200733 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4hln\" (UniqueName: \"kubernetes.io/projected/49482394-a214-4307-95c3-1d75283d2ff4-kube-api-access-p4hln\") pod \"machine-config-operator-74547568cd-cz8h4\" (UID: \"49482394-a214-4307-95c3-1d75283d2ff4\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.204611 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.226209 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bhjz\" (UniqueName: \"kubernetes.io/projected/37a32702-19b8-4fe3-8eda-52e5a39db569-kube-api-access-6bhjz\") pod \"csi-hostpathplugin-sb889\" (UID: \"37a32702-19b8-4fe3-8eda-52e5a39db569\") " pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.255041 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmvl9\" (UniqueName: \"kubernetes.io/projected/b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e-kube-api-access-rmvl9\") pod \"olm-operator-6b444d44fb-hzb8b\" (UID: \"b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.266562 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zcch\" (UniqueName: \"kubernetes.io/projected/615970c1-cf68-4be8-b528-37937fa778ab-kube-api-access-6zcch\") pod \"kube-storage-version-migrator-operator-b67b599dd-v72t9\" (UID: \"615970c1-cf68-4be8-b528-37937fa778ab\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.271692 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:52 crc kubenswrapper[4745]: E1208 00:09:52.271862 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:52.771831279 +0000 UTC m=+148.201037579 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.272064 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:52 crc kubenswrapper[4745]: E1208 00:09:52.272520 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:52.77250542 +0000 UTC m=+148.201711730 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.279796 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-wm7m5"] Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.281395 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hq629"] Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.295753 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.304343 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6z6p\" (UniqueName: \"kubernetes.io/projected/1e54638b-8ec2-49df-8c72-25dceedafbd0-kube-api-access-w6z6p\") pod \"machine-config-server-lwc9x\" (UID: \"1e54638b-8ec2-49df-8c72-25dceedafbd0\") " pod="openshift-machine-config-operator/machine-config-server-lwc9x" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.333318 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtzn7\" (UniqueName: \"kubernetes.io/projected/011ba434-d4cf-479a-8732-d8621edc4fcf-kube-api-access-rtzn7\") pod \"package-server-manager-789f6589d5-dfqd7\" (UID: \"011ba434-d4cf-479a-8732-d8621edc4fcf\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.344671 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6z9q\" (UniqueName: \"kubernetes.io/projected/cb60ee68-795e-44a7-896f-cc0ab8963417-kube-api-access-t6z9q\") pod \"machine-config-controller-84d6567774-qvcwp\" (UID: \"cb60ee68-795e-44a7-896f-cc0ab8963417\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.344977 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.352057 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.365477 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.378520 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:52 crc kubenswrapper[4745]: E1208 00:09:52.381739 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:52.881693593 +0000 UTC m=+148.310899963 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.383488 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nsh9z" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.384904 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:52 crc kubenswrapper[4745]: E1208 00:09:52.385896 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:52.885747137 +0000 UTC m=+148.314953437 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.396581 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.403369 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wszxv" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.427369 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.434919 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.446486 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.473194 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.473257 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.482730 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-sb889" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.487031 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:52 crc kubenswrapper[4745]: E1208 00:09:52.487323 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:52.987308216 +0000 UTC m=+148.416514516 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.493462 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.499675 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-lwc9x" Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.590320 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:52 crc kubenswrapper[4745]: E1208 00:09:52.590836 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:53.090819415 +0000 UTC m=+148.520025725 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.599891 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-j77jf"] Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.691123 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:52 crc kubenswrapper[4745]: E1208 00:09:52.691284 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:53.19125029 +0000 UTC m=+148.620456630 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.691395 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:52 crc kubenswrapper[4745]: E1208 00:09:52.691758 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-08 00:09:53.191743435 +0000 UTC m=+148.620949765 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.792807 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:52 crc kubenswrapper[4745]: E1208 00:09:52.793086 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:53.293070638 +0000 UTC m=+148.722276938 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.910536 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:52 crc kubenswrapper[4745]: E1208 00:09:52.910825 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:53.410814522 +0000 UTC m=+148.840020822 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:52 crc kubenswrapper[4745]: I1208 00:09:52.961203 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" podStartSLOduration=128.961186234 podStartE2EDuration="2m8.961186234s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:52.957308686 +0000 UTC m=+148.386514986" watchObservedRunningTime="2025-12-08 00:09:52.961186234 +0000 UTC m=+148.390392534" Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.017618 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:53 crc kubenswrapper[4745]: E1208 00:09:53.017888 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:53.51787176 +0000 UTC m=+148.947078060 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.018633 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:53 crc kubenswrapper[4745]: E1208 00:09:53.019068 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:53.519056306 +0000 UTC m=+148.948262606 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.046424 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" event={"ID":"dbea8073-c662-4a72-871b-0abf65d79bc7","Type":"ContainerStarted","Data":"58d1699c59a44d58cac5f12f66bfcf3e4c438d5b5722e4b4a811eeb116eb07fe"} Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.055546 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-kp656" event={"ID":"9683111b-558c-4f07-9908-2caab08063f2","Type":"ContainerStarted","Data":"73ff0c58787ea9ba0d0b6ce9dd4de942e8bb1b317dc7f3e56e7414a72ab12a52"} Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.058108 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hq629" event={"ID":"dc05c216-95a6-4890-9fc7-7eb70233e104","Type":"ContainerStarted","Data":"b76eac5192a7cf6307f223736ab49d6785a891b99768489f32f72398552b1dce"} Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.066368 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-j77jf" event={"ID":"b2d515a0-c5ed-4407-8e58-a75c8c485fe3","Type":"ContainerStarted","Data":"2739da05d172570885c2b018b3119909d4e34769c80c81fe748ed4b8118ffac3"} Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.068537 4745 generic.go:334] "Generic (PLEG): container finished" podID="cd1f7237-e796-4af1-b911-b15b54030e38" containerID="e6e8373b7de414b7b5e7438c3ba1630631362f022f8336e0fc4c8f8d339f7e93" exitCode=0 Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.068596 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" event={"ID":"cd1f7237-e796-4af1-b911-b15b54030e38","Type":"ContainerDied","Data":"e6e8373b7de414b7b5e7438c3ba1630631362f022f8336e0fc4c8f8d339f7e93"} Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.096009 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4qqnf" event={"ID":"006160e6-b0e3-4a8f-b297-d4ec96a2e703","Type":"ContainerStarted","Data":"33a6f647d7987736ff4d2b83d237cfd173425f129d8fd9d1438da5d0f1fe3353"} Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.116889 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29419200-2mqnz" event={"ID":"712511e1-14ba-4465-8050-02b8d5916f46","Type":"ContainerStarted","Data":"07aaaa25b08c710fd9e0e521c72b22f457f757f44b18d01a3e68329adf072e30"} Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.122341 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:53 crc kubenswrapper[4745]: E1208 00:09:53.123112 4745 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:53.623089731 +0000 UTC m=+149.052296041 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.155174 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" event={"ID":"230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1","Type":"ContainerStarted","Data":"983f36dfa8a48a31388c48f67b6d4853c6ce094c33c4c8311ee8ddbb882d20b6"} Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.184015 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" event={"ID":"64fa4ddd-3dbd-4910-b8f8-dba1bb97b963","Type":"ContainerStarted","Data":"b1efb260a88de10a50fa906c9d61aedd05b46e9a5091bf60de2d79a91e8afffe"} Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.199131 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" event={"ID":"154b1131-dee9-40da-8e6f-a70650023fe5","Type":"ContainerStarted","Data":"6d225f1b9d5cb07c7ff18df875b7a5b868bda36a1e4d38aa7a4e156011cb5e40"} Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.203805 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-b2dbs" event={"ID":"c377f24c-360e-4c65-ad5d-6423e735d7a4","Type":"ContainerStarted","Data":"445f992b3d18ff3f578d30f7c5c3b1b249b0e26aec2abe03e398dd1e23f0f785"} Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.204490 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" event={"ID":"88660132-2148-4282-92d7-a9f8d86b07ef","Type":"ContainerStarted","Data":"3c46befe114b8e63489ff1b63807314b5a60f8700562933751825ef134df8a9d"} Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.205485 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-qq7r4"] Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.206801 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" event={"ID":"d4a755dd-bcbd-4cf0-a396-673809d92250","Type":"ContainerStarted","Data":"f10fbf5aded6077a7349f1994d98082ff663c54fe58a1d0e054cec57c2f8cad8"} Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.223126 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:53 crc kubenswrapper[4745]: E1208 00:09:53.240322 4745 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:53.74030528 +0000 UTC m=+149.169511580 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.241074 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8cg7l" event={"ID":"a704627f-7539-4aec-ba1a-344a957ab7bf","Type":"ContainerStarted","Data":"0e64b7e7139e2d215195a1aea1cfb48da83c88d4a10197d5237a0a5398ee4eb5"} Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.241113 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.262663 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.268637 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.323773 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:53 crc kubenswrapper[4745]: E1208 00:09:53.324360 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:53.824345143 +0000 UTC m=+149.253551443 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.324542 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:53 crc kubenswrapper[4745]: E1208 00:09:53.325345 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:53.825337063 +0000 UTC m=+149.254543363 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.403471 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" podStartSLOduration=128.403453395 podStartE2EDuration="2m8.403453395s" podCreationTimestamp="2025-12-08 00:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:53.401610589 +0000 UTC m=+148.830816889" watchObservedRunningTime="2025-12-08 00:09:53.403453395 +0000 UTC m=+148.832659695" Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.425528 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:53 crc kubenswrapper[4745]: E1208 00:09:53.425863 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:53.925845111 +0000 UTC m=+149.355051411 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.530172 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:53 crc kubenswrapper[4745]: E1208 00:09:53.530540 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:54.030527286 +0000 UTC m=+149.459733586 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.546360 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.631007 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:53 crc kubenswrapper[4745]: E1208 00:09:53.631393 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:54.131363303 +0000 UTC m=+149.560569623 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.631697 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:53 crc kubenswrapper[4745]: E1208 00:09:53.632000 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:54.131987502 +0000 UTC m=+149.561193802 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:53 crc kubenswrapper[4745]: W1208 00:09:53.691962 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-d9d824ba52977096b93dd1241c3169726365228fe5e1f7809ccde1e964a799fa WatchSource:0}: Error finding container d9d824ba52977096b93dd1241c3169726365228fe5e1f7809ccde1e964a799fa: Status 404 returned error can't find the container with id d9d824ba52977096b93dd1241c3169726365228fe5e1f7809ccde1e964a799fa Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.732508 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:53 crc kubenswrapper[4745]: E1208 00:09:53.732534 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:54.23251255 +0000 UTC m=+149.661718860 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.732844 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:53 crc kubenswrapper[4745]: E1208 00:09:53.733162 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:54.233151479 +0000 UTC m=+149.662357849 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.761352 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6s6x7" podStartSLOduration=129.761332582 podStartE2EDuration="2m9.761332582s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:53.761105545 +0000 UTC m=+149.190311845" watchObservedRunningTime="2025-12-08 00:09:53.761332582 +0000 UTC m=+149.190538882" Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.833447 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:53 crc kubenswrapper[4745]: E1208 00:09:53.834036 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:54.334021838 +0000 UTC m=+149.763228138 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:53 crc kubenswrapper[4745]: I1208 00:09:53.944648 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:53 crc kubenswrapper[4745]: E1208 00:09:53.945226 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:54.445213132 +0000 UTC m=+149.874419422 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.047569 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:54 crc kubenswrapper[4745]: E1208 00:09:54.048089 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:54.548074491 +0000 UTC m=+149.977280781 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.112674 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-cvr94"] Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.113340 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" podStartSLOduration=130.113331269 podStartE2EDuration="2m10.113331269s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:54.113008279 +0000 UTC m=+149.542214579" watchObservedRunningTime="2025-12-08 00:09:54.113331269 +0000 UTC m=+149.542537569" Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.140370 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-lc28j"] Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.148830 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:54 crc kubenswrapper[4745]: E1208 00:09:54.149186 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:54.649174166 +0000 UTC m=+150.078380466 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.163726 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-k98nn" podStartSLOduration=130.163709361 podStartE2EDuration="2m10.163709361s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:54.163306779 +0000 UTC m=+149.592513079" watchObservedRunningTime="2025-12-08 00:09:54.163709361 +0000 UTC m=+149.592915661" Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.243790 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-dktn5" podStartSLOduration=129.243773353 podStartE2EDuration="2m9.243773353s" podCreationTimestamp="2025-12-08 00:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:54.217039864 +0000 UTC m=+149.646246184" watchObservedRunningTime="2025-12-08 00:09:54.243773353 +0000 UTC m=+149.672979643" Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.244754 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-rkq5r" podStartSLOduration=130.244746503 podStartE2EDuration="2m10.244746503s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:54.242230755 +0000 UTC m=+149.671437055" watchObservedRunningTime="2025-12-08 00:09:54.244746503 +0000 UTC m=+149.673952823" Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.250629 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:54 crc kubenswrapper[4745]: E1208 00:09:54.251010 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:54.750993804 +0000 UTC m=+150.180200104 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.264804 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-qq7r4" event={"ID":"8151bcec-d2f6-4c68-bbda-c0f31a47f5ed","Type":"ContainerStarted","Data":"35467137c496a23b162acaa37ef7ae60db5e479e8a6609af0c04368a459292a1"} Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.275665 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" podStartSLOduration=130.275648199 podStartE2EDuration="2m10.275648199s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:54.274819193 +0000 UTC m=+149.704025493" watchObservedRunningTime="2025-12-08 00:09:54.275648199 +0000 UTC m=+149.704854499" Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.295957 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-lwc9x" event={"ID":"1e54638b-8ec2-49df-8c72-25dceedafbd0","Type":"ContainerStarted","Data":"4b5e060b5246d9951558fed278368d526d803b8e850ceed3a88843b967df66f2"} Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.297514 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-qtsfq" event={"ID":"a15e5593-aef3-43e1-894e-51a109c501a7","Type":"ContainerStarted","Data":"1f7eaf141a08057c20a932f8668bbea1faef92255ca2644c94ea13e430c6841b"} Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.319569 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8cg7l" event={"ID":"a704627f-7539-4aec-ba1a-344a957ab7bf","Type":"ContainerStarted","Data":"0e98822fc1c8f78fdcf2f43da359cbb0be251ea0d893aafea089350498ed0e02"} Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.324014 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"d9d824ba52977096b93dd1241c3169726365228fe5e1f7809ccde1e964a799fa"} Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.346667 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-8cg7l" podStartSLOduration=130.346643952 podStartE2EDuration="2m10.346643952s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:54.344651551 +0000 UTC m=+149.773857851" watchObservedRunningTime="2025-12-08 00:09:54.346643952 +0000 UTC m=+149.775850252" Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.349013 4745 generic.go:334] "Generic (PLEG): container finished" podID="64fa4ddd-3dbd-4910-b8f8-dba1bb97b963" containerID="b1efb260a88de10a50fa906c9d61aedd05b46e9a5091bf60de2d79a91e8afffe" exitCode=0 Dec 08 00:09:54 crc 
kubenswrapper[4745]: I1208 00:09:54.349128 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" event={"ID":"64fa4ddd-3dbd-4910-b8f8-dba1bb97b963","Type":"ContainerDied","Data":"b1efb260a88de10a50fa906c9d61aedd05b46e9a5091bf60de2d79a91e8afffe"} Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.349166 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" event={"ID":"64fa4ddd-3dbd-4910-b8f8-dba1bb97b963","Type":"ContainerStarted","Data":"6b9177e8a9121c6e60eb0717ce05f5a16f7084c04cd2f5542336c800a4b2e689"} Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.349962 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.353021 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:54 crc kubenswrapper[4745]: E1208 00:09:54.353347 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:54.853335537 +0000 UTC m=+150.282541837 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.375104 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" podStartSLOduration=130.375087763 podStartE2EDuration="2m10.375087763s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:54.374836195 +0000 UTC m=+149.804042495" watchObservedRunningTime="2025-12-08 00:09:54.375087763 +0000 UTC m=+149.804294063" Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.377641 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-9d288" event={"ID":"230159ed-bd2e-46b4-a6ec-fc1ffe7b75b1","Type":"ContainerStarted","Data":"eb57f969c28e3d81b863f57120671a96e6e3c3835dbbe9fb8de3e0aec93ad5ce"} Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.454575 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:54 crc kubenswrapper[4745]: E1208 00:09:54.455564 
4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:54.955526916 +0000 UTC m=+150.384733216 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.456143 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:54 crc kubenswrapper[4745]: E1208 00:09:54.461246 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:54.961229761 +0000 UTC m=+150.390436061 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.557435 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:54 crc kubenswrapper[4745]: E1208 00:09:54.557971 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:55.057951562 +0000 UTC m=+150.487157862 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.659604 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:54 crc kubenswrapper[4745]: E1208 00:09:54.659915 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:55.159902523 +0000 UTC m=+150.589108823 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.760193 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:54 crc kubenswrapper[4745]: E1208 00:09:54.762134 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:55.260435981 +0000 UTC m=+150.689642281 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.762182 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:54 crc kubenswrapper[4745]: E1208 00:09:54.762636 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:55.262629548 +0000 UTC m=+150.691835848 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.774456 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb"] Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.863301 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qzc8t"] Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.863756 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:54 crc kubenswrapper[4745]: E1208 00:09:54.868625 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:55.368600443 +0000 UTC m=+150.797806743 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.870284 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:54 crc kubenswrapper[4745]: E1208 00:09:54.870783 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:55.370768049 +0000 UTC m=+150.799974349 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.976410 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:54 crc kubenswrapper[4745]: E1208 00:09:54.977111 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:55.477087473 +0000 UTC m=+150.906293773 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:54 crc kubenswrapper[4745]: I1208 00:09:54.977624 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:54 crc kubenswrapper[4745]: E1208 00:09:54.978010 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:55.477999321 +0000 UTC m=+150.907205611 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.050276 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b"] Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.085334 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:55 crc kubenswrapper[4745]: E1208 00:09:55.097691 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:55.597644194 +0000 UTC m=+151.026850494 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.098192 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4"] Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.187958 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:55 crc kubenswrapper[4745]: E1208 00:09:55.188317 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:55.68830428 +0000 UTC m=+151.117510580 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.218797 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-sb889"] Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.233412 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b"] Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.281340 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49"] Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.295553 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:55 crc kubenswrapper[4745]: E1208 00:09:55.295832 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:55.795816362 +0000 UTC m=+151.225022662 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.326674 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-wszxv"] Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.364373 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9"] Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.387742 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g"] Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.396511 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:55 crc kubenswrapper[4745]: E1208 00:09:55.396815 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:55.896800373 +0000 UTC m=+151.326006673 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.406731 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" event={"ID":"88660132-2148-4282-92d7-a9f8d86b07ef","Type":"ContainerStarted","Data":"2f61945110ab47da7fa654ffa638ed65517048b4b90653c1d8f36a5ee5312886"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.411548 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-sb889" event={"ID":"37a32702-19b8-4fe3-8eda-52e5a39db569","Type":"ContainerStarted","Data":"ee57fba6f03ba79af7147a040322172ea2f516ff63b0722da8f33e5a93785b27"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.434841 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-cvr94" event={"ID":"ca4fb91d-3cfc-47c9-9e47-a4aae006b200","Type":"ContainerStarted","Data":"515d78901fe9e3592fcd5581e741ddd5a3117292329ebfc86ea54e79c6de8283"} Dec 08 00:09:55 crc kubenswrapper[4745]: W1208 00:09:55.472447 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39b1b9ce_45b4_45ff_b6d5_e4deedf30e19.slice/crio-a50dd5573e3f561caeced9bcb6f74200292c5cb0b5ef1fd9b4bce4182b0f27c3 WatchSource:0}: Error finding container a50dd5573e3f561caeced9bcb6f74200292c5cb0b5ef1fd9b4bce4182b0f27c3: Status 404 returned error can't find the container with id a50dd5573e3f561caeced9bcb6f74200292c5cb0b5ef1fd9b4bce4182b0f27c3 Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.474209 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" event={"ID":"cd1f7237-e796-4af1-b911-b15b54030e38","Type":"ContainerStarted","Data":"f8f4787e04797798cf4d1507f38859ef5e8183140a5fea2166021a7c1553faeb"} Dec 08 00:09:55 crc kubenswrapper[4745]: W1208 00:09:55.475618 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac18e1eb_73d7_42e2_910b_3c86e12ef5e6.slice/crio-1c7ae4b578949d877b3474c62763a48209c37a04e4b229281946b5bf8c2c992c WatchSource:0}: Error finding container 1c7ae4b578949d877b3474c62763a48209c37a04e4b229281946b5bf8c2c992c: Status 404 returned error can't find the container with id 1c7ae4b578949d877b3474c62763a48209c37a04e4b229281946b5bf8c2c992c Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.482704 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-qtsfq" event={"ID":"a15e5593-aef3-43e1-894e-51a109c501a7","Type":"ContainerStarted","Data":"7c5895262cb638c98c8bd7734794271ab57ce8012eee53045de749d0e7ea912f"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.498545 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-wm7m5" podStartSLOduration=131.498218168 podStartE2EDuration="2m11.498218168s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:55.441335207 +0000 UTC m=+150.870541517" watchObservedRunningTime="2025-12-08 00:09:55.498218168 +0000 UTC m=+150.927424468" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.501454 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.503380 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lc28j" event={"ID":"31884019-0917-4fda-a319-2d896944a6b8","Type":"ContainerStarted","Data":"eafd94f2b03d265afe9102bd6acf07ff849bc046f37d6c11dcd8aadcc16c5562"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.508475 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lc28j" event={"ID":"31884019-0917-4fda-a319-2d896944a6b8","Type":"ContainerStarted","Data":"33a768b644c21d772e81fb856db18008bfb163e830095bb8940716fc2263cff2"} Dec 08 00:09:55 crc kubenswrapper[4745]: E1208 00:09:55.504017 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:56.003992975 +0000 UTC m=+151.433199275 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.508758 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:55 crc kubenswrapper[4745]: E1208 00:09:55.510416 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:56.010398761 +0000 UTC m=+151.439605051 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.542296 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-7h4jt"] Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.547010 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7"] Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.555063 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nsh9z"] Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.558769 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-qtsfq" podStartSLOduration=131.558745652 podStartE2EDuration="2m11.558745652s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:55.524832333 +0000 UTC m=+150.954038643" watchObservedRunningTime="2025-12-08 00:09:55.558745652 +0000 UTC m=+150.987951972" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.590961 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-qq7r4" event={"ID":"8151bcec-d2f6-4c68-bbda-c0f31a47f5ed","Type":"ContainerStarted","Data":"dc889beff1f43f0b763bcd71f7ba615032b3356445bd7f04cd7c33a2eb2b76dd"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.609752 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qzc8t" event={"ID":"4710a169-2ce7-48ed-b70f-d637966bbcfd","Type":"ContainerStarted","Data":"f6c710e870ebde282e75a7df450a8ec3ac254bc01e8eeaf681959fcf621582d2"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.620079 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:55 crc kubenswrapper[4745]: E1208 00:09:55.621241 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:56.121209584 +0000 UTC m=+151.550415894 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.626559 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-kp656" event={"ID":"9683111b-558c-4f07-9908-2caab08063f2","Type":"ContainerStarted","Data":"2ae46da9e2ec061717e4001dc675406a5b31e457b4c28755986506c959578f57"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.626595 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-kp656" event={"ID":"9683111b-558c-4f07-9908-2caab08063f2","Type":"ContainerStarted","Data":"396d85945964fe19055307900ee18613d901d064fa620c433492f25fea2389d8"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.663980 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" event={"ID":"3cf962b7-91c3-4e33-b2b4-ca2d1e26f089","Type":"ContainerStarted","Data":"166ff7e5a0c935d5d8b2fa0ef67bfb67ea14af16077a84ee2d0cc7ac36fbab48"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.664865 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.665738 4745 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-76wjb container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.41:8443/healthz\": dial tcp 10.217.0.41:8443: connect: connection refused" start-of-body= Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.665778 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" podUID="3cf962b7-91c3-4e33-b2b4-ca2d1e26f089" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.41:8443/healthz\": dial tcp 10.217.0.41:8443: connect: connection refused" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.673250 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-b2dbs" event={"ID":"c377f24c-360e-4c65-ad5d-6423e735d7a4","Type":"ContainerStarted","Data":"0095e0c205b50ba97914336cf1d6e1c2ed7f144710b6dc9b1eebb6e05061a4be"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.674055 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-b2dbs" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.679514 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lc28j" podStartSLOduration=130.679500649 podStartE2EDuration="2m10.679500649s" podCreationTimestamp="2025-12-08 00:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:55.564727815 +0000 UTC m=+150.993934115" watchObservedRunningTime="2025-12-08 00:09:55.679500649 +0000 UTC m=+151.108706949" Dec 08 00:09:55 
crc kubenswrapper[4745]: I1208 00:09:55.684197 4745 patch_prober.go:28] interesting pod/downloads-7954f5f757-b2dbs container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.684242 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-b2dbs" podUID="c377f24c-360e-4c65-ad5d-6423e735d7a4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.691769 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-qq7r4" podStartSLOduration=6.691753354 podStartE2EDuration="6.691753354s" podCreationTimestamp="2025-12-08 00:09:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:55.68020262 +0000 UTC m=+151.109408920" watchObservedRunningTime="2025-12-08 00:09:55.691753354 +0000 UTC m=+151.120959664" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.693777 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4"] Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.696432 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29419200-2mqnz" event={"ID":"712511e1-14ba-4465-8050-02b8d5916f46","Type":"ContainerStarted","Data":"2f4c9d3abb3b5c74d4e5dcbdbe041cd056985632cfa3a044d2ac8d9d64f21b09"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.696478 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp"] Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.701754 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hq629" event={"ID":"dc05c216-95a6-4890-9fc7-7eb70233e104","Type":"ContainerStarted","Data":"481e1c2cf6e74344ab4bf9e42a5a609e2c6c9c358a0fa26c2cd461a07fd3d28d"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.702508 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-hq629" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.711673 4745 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hq629 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/healthz\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.711806 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hq629" podUID="dc05c216-95a6-4890-9fc7-7eb70233e104" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.14:8080/healthz\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.712222 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-kp656" podStartSLOduration=131.71220682 podStartE2EDuration="2m11.71220682s" 
podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:55.711634983 +0000 UTC m=+151.140841273" watchObservedRunningTime="2025-12-08 00:09:55.71220682 +0000 UTC m=+151.141413120" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.717533 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4"] Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.720236 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx"] Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.722162 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:55 crc kubenswrapper[4745]: E1208 00:09:55.748364 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:56.248349817 +0000 UTC m=+151.677556117 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.751611 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" podStartSLOduration=130.751594106 podStartE2EDuration="2m10.751594106s" podCreationTimestamp="2025-12-08 00:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:55.749488041 +0000 UTC m=+151.178694341" watchObservedRunningTime="2025-12-08 00:09:55.751594106 +0000 UTC m=+151.180800406" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.759442 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" event={"ID":"154b1131-dee9-40da-8e6f-a70650023fe5","Type":"ContainerStarted","Data":"1e0081261c73dc98d66952c689f11f5e7fadfe8b04f6b6f146e5f3b6439a0e9e"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.759492 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" event={"ID":"154b1131-dee9-40da-8e6f-a70650023fe5","Type":"ContainerStarted","Data":"5eaeab4b4932b6d55c42dd78c2dae27ef93b7c695ef62005f36cde1ab13160a2"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.772956 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-b2dbs" podStartSLOduration=131.77294133 podStartE2EDuration="2m11.77294133s" 
podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:55.772191997 +0000 UTC m=+151.201398297" watchObservedRunningTime="2025-12-08 00:09:55.77294133 +0000 UTC m=+151.202147630" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.799269 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"aefa0b5e26fb32648d40a507fa6fa8b3084a949c851597f9a10c3d7b66f642bd"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.799801 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-pruner-29419200-2mqnz" podStartSLOduration=131.799784631 podStartE2EDuration="2m11.799784631s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:55.798465241 +0000 UTC m=+151.227671541" watchObservedRunningTime="2025-12-08 00:09:55.799784631 +0000 UTC m=+151.228990921" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.807890 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-lwc9x" event={"ID":"1e54638b-8ec2-49df-8c72-25dceedafbd0","Type":"ContainerStarted","Data":"1e35d15ad50633af40723d48f01404e6262d5fe671760f9a4d0625eb5a824e38"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.822612 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:55 crc kubenswrapper[4745]: E1208 00:09:55.823703 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:56.323689183 +0000 UTC m=+151.752895483 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:55 crc kubenswrapper[4745]: W1208 00:09:55.825213 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf2f7e37_36bf_4e20_91b9_7f63c53ec998.slice/crio-db402cd67b8c27538c8664e4846d627a841e59ce81e28e153be3d4de70f59d04 WatchSource:0}: Error finding container db402cd67b8c27538c8664e4846d627a841e59ce81e28e153be3d4de70f59d04: Status 404 returned error can't find the container with id db402cd67b8c27538c8664e4846d627a841e59ce81e28e153be3d4de70f59d04 Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.825754 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" event={"ID":"d2d630d5-4198-4d9d-8cdb-22aaf2bcd8fe","Type":"ContainerStarted","Data":"bd2685b6936804c862848e68ac18567211113eeb67657963e942af2996fa1c44"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.867807 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6vpp7" podStartSLOduration=131.867791844 podStartE2EDuration="2m11.867791844s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:55.821409703 +0000 UTC m=+151.250616003" watchObservedRunningTime="2025-12-08 00:09:55.867791844 +0000 UTC m=+151.296998144" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.888233 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-hq629" podStartSLOduration=130.888214659 podStartE2EDuration="2m10.888214659s" podCreationTimestamp="2025-12-08 00:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:55.868769113 +0000 UTC m=+151.297975413" watchObservedRunningTime="2025-12-08 00:09:55.888214659 +0000 UTC m=+151.317420959" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.894972 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4qqnf" event={"ID":"006160e6-b0e3-4a8f-b297-d4ec96a2e703","Type":"ContainerStarted","Data":"ea608c9689209bd5d45b54830a71ba564e650e9839bd10a20e0ba7f84b7b2cec"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.895013 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4qqnf" event={"ID":"006160e6-b0e3-4a8f-b297-d4ec96a2e703","Type":"ContainerStarted","Data":"fe678907497dc43eda0fd5db6cf499cb66749d5131e6a1cdffa0c9fb022a5789"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.912153 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-lwc9x" podStartSLOduration=6.912134981 podStartE2EDuration="6.912134981s" podCreationTimestamp="2025-12-08 00:09:49 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:55.890789188 +0000 UTC m=+151.319995488" watchObservedRunningTime="2025-12-08 00:09:55.912134981 +0000 UTC m=+151.341341281" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.929256 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:55 crc kubenswrapper[4745]: E1208 00:09:55.930632 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:56.430619277 +0000 UTC m=+151.859825567 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.939040 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"d6fe23beb43adab1c6065335486e8e61be2136bd87df6e38344a62c75136e809"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.961525 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.961569 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.962559 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b" event={"ID":"d685ac29-3c32-478c-ba21-34b3bedc547b","Type":"ContainerStarted","Data":"513b62d15196f027a3fc3022cece3af9456bc6c9fbc27abd7a8e5ffa8ad081e9"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.996977 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-j77jf" event={"ID":"b2d515a0-c5ed-4407-8e58-a75c8c485fe3","Type":"ContainerStarted","Data":"9bdb4929b7da19f9dd0b08db663b1dda6cd9914af593a5c275fdb5e2de96b90a"} Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.997564 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-j77jf" Dec 08 00:09:55 crc kubenswrapper[4745]: I1208 00:09:55.998293 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" podStartSLOduration=130.998281369 podStartE2EDuration="2m10.998281369s" podCreationTimestamp="2025-12-08 00:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:55.946764391 +0000 UTC m=+151.375970711" watchObservedRunningTime="2025-12-08 00:09:55.998281369 +0000 UTC m=+151.427487669" Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.010122 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4qqnf" podStartSLOduration=132.010101421 podStartE2EDuration="2m12.010101421s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:55.980000339 +0000 UTC m=+151.409206639" watchObservedRunningTime="2025-12-08 00:09:56.010101421 +0000 UTC m=+151.439307721" Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.026083 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" event={"ID":"49482394-a214-4307-95c3-1d75283d2ff4","Type":"ContainerStarted","Data":"5b1e13884e78b0461ca0b516aa43f08880b0e536d83091e9e608e1c13660b779"} Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.027873 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-j77jf" podStartSLOduration=132.027858094 podStartE2EDuration="2m12.027858094s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:56.026493992 +0000 UTC m=+151.455700292" watchObservedRunningTime="2025-12-08 00:09:56.027858094 +0000 UTC m=+151.457064394" Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.046355 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-j77jf" Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.046805 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:56 crc kubenswrapper[4745]: E1208 00:09:56.047735 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:56.547718942 +0000 UTC m=+151.976925242 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.047889 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:56 crc kubenswrapper[4745]: E1208 00:09:56.052919 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:56.552899051 +0000 UTC m=+151.982105421 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.148969 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:56 crc kubenswrapper[4745]: E1208 00:09:56.150076 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:56.650059636 +0000 UTC m=+152.079265936 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.249822 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:56 crc kubenswrapper[4745]: E1208 00:09:56.250229 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:56.750216532 +0000 UTC m=+152.179422842 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.350790 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:56 crc kubenswrapper[4745]: E1208 00:09:56.350967 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:56.850941426 +0000 UTC m=+152.280147726 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.351384 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:56 crc kubenswrapper[4745]: E1208 00:09:56.351698 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:56.851686659 +0000 UTC m=+152.280892959 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.372606 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.377422 4745 patch_prober.go:28] interesting pod/router-default-5444994796-qtsfq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 00:09:56 crc kubenswrapper[4745]: [-]has-synced failed: reason withheld Dec 08 00:09:56 crc kubenswrapper[4745]: [+]process-running ok Dec 08 00:09:56 crc kubenswrapper[4745]: healthz check failed Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.377461 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qtsfq" podUID="a15e5593-aef3-43e1-894e-51a109c501a7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.452357 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:56 crc kubenswrapper[4745]: E1208 00:09:56.452716 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:56.952702002 +0000 UTC m=+152.381908302 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.535577 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.554940 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:56 crc kubenswrapper[4745]: E1208 00:09:56.555764 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:57.055752247 +0000 UTC m=+152.484958547 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.657034 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:56 crc kubenswrapper[4745]: E1208 00:09:56.657197 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:57.157172952 +0000 UTC m=+152.586379252 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.657283 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:56 crc kubenswrapper[4745]: E1208 00:09:56.657664 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:57.157647416 +0000 UTC m=+152.586853716 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.763385 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:56 crc kubenswrapper[4745]: E1208 00:09:56.763736 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:57.263722084 +0000 UTC m=+152.692928384 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.867615 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:56 crc kubenswrapper[4745]: E1208 00:09:56.868016 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:57.367999037 +0000 UTC m=+152.797205327 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:56 crc kubenswrapper[4745]: I1208 00:09:56.969442 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:56 crc kubenswrapper[4745]: E1208 00:09:56.970047 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:57.47003273 +0000 UTC m=+152.899239030 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.019753 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp" event={"ID":"cb60ee68-795e-44a7-896f-cc0ab8963417","Type":"ContainerStarted","Data":"8e8f493a5c776587cedf8cabf1d3dc19f2c60a128a8e8702f37ba866c9df6a7f"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.021044 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9" event={"ID":"615970c1-cf68-4be8-b528-37937fa778ab","Type":"ContainerStarted","Data":"c4cce58362ac34c16e1e1f97bec85b73fee905b6da4e92b23fe61b9b1708ac00"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.021067 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9" event={"ID":"615970c1-cf68-4be8-b528-37937fa778ab","Type":"ContainerStarted","Data":"0a4aad3e3ea6b9f5f418c559d840e8ee923fac8fac8cefa6ce54f13c91f7dd24"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.028212 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" event={"ID":"b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e","Type":"ContainerStarted","Data":"2c2ac5a8428be2f3e04a1b0f8f96c2a4cae7d448a336eec5f2810cce84d9d86d"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.028250 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" event={"ID":"b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e","Type":"ContainerStarted","Data":"c0ca901812ce134563c90c566507c1f52ad14b02d4f2da9e390a827c2fa5bf0f"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.028564 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.030795 4745 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-hzb8b container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.43:8443/healthz\": dial tcp 10.217.0.43:8443: connect: connection refused" start-of-body= Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.030825 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" podUID="b0a6a5ce-2723-4908-b9ee-fdbf199d0c5e" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.43:8443/healthz\": dial tcp 10.217.0.43:8443: connect: connection refused" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.031434 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"1dd92c94f6e332ecaacff4c9138758b156c5b4383ba701ccb16beb14e57c4b4a"} Dec 08 00:09:57 
crc kubenswrapper[4745]: I1208 00:09:57.031454 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"26c860a7ab719f714dda4bf9d94950e88253d07bf528f023e03ea344f40daf47"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.032040 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.051285 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wszxv" event={"ID":"39b1b9ce-45b4-45ff-b6d5-e4deedf30e19","Type":"ContainerStarted","Data":"f398d5cd663b81966d8f64f8552c692b79f16081b4cf04c02017cd5abd699a47"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.051325 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wszxv" event={"ID":"39b1b9ce-45b4-45ff-b6d5-e4deedf30e19","Type":"ContainerStarted","Data":"a50dd5573e3f561caeced9bcb6f74200292c5cb0b5ef1fd9b4bce4182b0f27c3"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.056634 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qzc8t" event={"ID":"4710a169-2ce7-48ed-b70f-d637966bbcfd","Type":"ContainerStarted","Data":"e1d7587df0e7e6d4053988a7381872bf521f4a2634e38010d08be03f1a653787"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.071980 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:57 crc kubenswrapper[4745]: E1208 00:09:57.072360 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:57.572343323 +0000 UTC m=+153.001549623 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.079993 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4" event={"ID":"cf2f7e37-36bf-4e20-91b9-7f63c53ec998","Type":"ContainerStarted","Data":"d64c15d4dcf29e9b157947480ea873e6d5d206a9592e8f50ddb1e4ad91704fa7"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.080036 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4" event={"ID":"cf2f7e37-36bf-4e20-91b9-7f63c53ec998","Type":"ContainerStarted","Data":"db402cd67b8c27538c8664e4846d627a841e59ce81e28e153be3d4de70f59d04"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.092250 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v72t9" podStartSLOduration=133.092231942 podStartE2EDuration="2m13.092231942s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:57.048832583 +0000 UTC m=+152.478038883" watchObservedRunningTime="2025-12-08 00:09:57.092231942 +0000 UTC m=+152.521438232" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.092901 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-sb889" event={"ID":"37a32702-19b8-4fe3-8eda-52e5a39db569","Type":"ContainerStarted","Data":"8527e2442532f6f903d33c77bf7dbdb7ef1c13845610ac7a7a2f0ab1f5bd87db"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.131259 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" podStartSLOduration=132.131240636 podStartE2EDuration="2m12.131240636s" podCreationTimestamp="2025-12-08 00:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:57.093554242 +0000 UTC m=+152.522760542" watchObservedRunningTime="2025-12-08 00:09:57.131240636 +0000 UTC m=+152.560446936" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.135240 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx" event={"ID":"9f18f9fd-0f3e-4262-9d3f-f657288b0e73","Type":"ContainerStarted","Data":"642cd6ff9f6a982a74cb2e1b853f5985169fd1658700111662e2decc4749d435"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.135290 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx" event={"ID":"9f18f9fd-0f3e-4262-9d3f-f657288b0e73","Type":"ContainerStarted","Data":"70ab7f702829579913f86c65470c0523f8fda7b55ee3b92c889c3be7c506c959"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.155901 4745 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6z5q4" podStartSLOduration=133.155883921 podStartE2EDuration="2m13.155883921s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:57.155325033 +0000 UTC m=+152.584531333" watchObservedRunningTime="2025-12-08 00:09:57.155883921 +0000 UTC m=+152.585090221" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.173276 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:57 crc kubenswrapper[4745]: E1208 00:09:57.173393 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:57.673370246 +0000 UTC m=+153.102576546 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.173726 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:57 crc kubenswrapper[4745]: E1208 00:09:57.174881 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:57.674872302 +0000 UTC m=+153.104078602 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.184581 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" event={"ID":"cd1f7237-e796-4af1-b911-b15b54030e38","Type":"ContainerStarted","Data":"2a24ab93359064b1561ac621cb61f0628c1366a690c2beaf376ca9ea290eb36c"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.198524 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" event={"ID":"49482394-a214-4307-95c3-1d75283d2ff4","Type":"ContainerStarted","Data":"a981fe74ebfa3ae95de1dfe70148cbcc05fe616e28445e10c82bd4baf9d4922b"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.198572 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" event={"ID":"49482394-a214-4307-95c3-1d75283d2ff4","Type":"ContainerStarted","Data":"a1f91f7ab0af3de72ee0e0e3374c84aeba442593b5c0bb2ff42b5eb1ce87b47b"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.201881 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-cvr94" event={"ID":"ca4fb91d-3cfc-47c9-9e47-a4aae006b200","Type":"ContainerStarted","Data":"45b136cfb05bf6f7308466ed6aa93609a65c8032d6a4c766a232f1fd35035b98"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.201915 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-cvr94" event={"ID":"ca4fb91d-3cfc-47c9-9e47-a4aae006b200","Type":"ContainerStarted","Data":"3afb225799df78abfc0c39f23fa9e2a6704dc70d68428511b6b9c3a2811fa296"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.202368 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-cvr94" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.208528 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"ea36d3e3a98f95d8016490545f08fe7bd4e6c497348149ba2e06e965d8a1e753"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.216813 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49" event={"ID":"ac18e1eb-73d7-42e2-910b-3c86e12ef5e6","Type":"ContainerStarted","Data":"31eebe517edbfbc10016cd89d365b7a30ceb26b01e9db8f1167b2bb7a0414824"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.216856 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49" event={"ID":"ac18e1eb-73d7-42e2-910b-3c86e12ef5e6","Type":"ContainerStarted","Data":"1c7ae4b578949d877b3474c62763a48209c37a04e4b229281946b5bf8c2c992c"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.221449 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" 
podStartSLOduration=133.221435968 podStartE2EDuration="2m13.221435968s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:57.221322604 +0000 UTC m=+152.650528924" watchObservedRunningTime="2025-12-08 00:09:57.221435968 +0000 UTC m=+152.650642268" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.222765 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fxkjx" podStartSLOduration=133.222755128 podStartE2EDuration="2m13.222755128s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:57.183564898 +0000 UTC m=+152.612771198" watchObservedRunningTime="2025-12-08 00:09:57.222755128 +0000 UTC m=+152.651961428" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.237038 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-7h4jt" event={"ID":"ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647","Type":"ContainerStarted","Data":"6d0baeffad7190f62363e89a01fb9273bd6a502420250924b8c6712b106c813a"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.237072 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-7h4jt" event={"ID":"ae3419c6-8cc5-4dfc-8ce7-dc6c351d0647","Type":"ContainerStarted","Data":"1c9681ea4354e4188462cd557552d4eacbfe48ba40b669095c7220ae214d1737"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.239012 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b" event={"ID":"d685ac29-3c32-478c-ba21-34b3bedc547b","Type":"ContainerStarted","Data":"469c07682145bde5f2d55ca4d7bd53fbd565cef7ae585ca6ab301f43c7b16e47"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.240844 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" event={"ID":"3cf962b7-91c3-4e33-b2b4-ca2d1e26f089","Type":"ContainerStarted","Data":"bd800bff12170445f8ddc1f64cb874b21ddb39dbd712fe60f715a98f214718ef"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.250609 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-76wjb" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.251941 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-x6b49" podStartSLOduration=133.251913061 podStartE2EDuration="2m13.251913061s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:57.249774005 +0000 UTC m=+152.678980305" watchObservedRunningTime="2025-12-08 00:09:57.251913061 +0000 UTC m=+152.681119361" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.256498 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nsh9z" event={"ID":"27dbf6ed-7b15-4ddd-84b0-83fa2f178c63","Type":"ContainerStarted","Data":"262a4c3c4d1727807f369bcf1fb134c8ea7e8f25943f25edafb28ccd1d5aabaf"} Dec 08 
00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.256543 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nsh9z" event={"ID":"27dbf6ed-7b15-4ddd-84b0-83fa2f178c63","Type":"ContainerStarted","Data":"b4365719a3a8652d9f29dd7b223ff837d9528aca5c5fb0adffa71f7c8bcc1e87"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.269183 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" event={"ID":"356675b0-56a2-4687-9158-922a52484fc1","Type":"ContainerStarted","Data":"5e8ee62a4ae4d5c77c3ea2b4ed0624e01091d2b5f6e956403d7996c644213865"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.269226 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" event={"ID":"356675b0-56a2-4687-9158-922a52484fc1","Type":"ContainerStarted","Data":"d7d2e33529471a29a4365d2121a68a63d143e7f40164b26f362aae95031c5eec"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.270988 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.271433 4745 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-r5nn4 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused" start-of-body= Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.271494 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" podUID="356675b0-56a2-4687-9158-922a52484fc1" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.40:5443/healthz\": dial tcp 10.217.0.40:5443: connect: connection refused" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.276698 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:57 crc kubenswrapper[4745]: E1208 00:09:57.277719 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:57.7777036 +0000 UTC m=+153.206909900 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.304592 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.305051 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-cz8h4" podStartSLOduration=133.305034697 podStartE2EDuration="2m13.305034697s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:57.304403938 +0000 UTC m=+152.733610238" watchObservedRunningTime="2025-12-08 00:09:57.305034697 +0000 UTC m=+152.734240997" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.312644 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" event={"ID":"fa019532-0002-464b-9965-2804d83126a8","Type":"ContainerStarted","Data":"716a91fdc1b5339dc0f7d375e7a74a5360c128b3d167029c7654c426c6fa9168"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.312688 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" event={"ID":"fa019532-0002-464b-9965-2804d83126a8","Type":"ContainerStarted","Data":"fe6e5fbf5b57b5341b465fdfde74f534ae05494a8efe6d70871cc4a066fdd5a7"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.328361 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7" event={"ID":"011ba434-d4cf-479a-8732-d8621edc4fcf","Type":"ContainerStarted","Data":"2256df77ace9b4d6368edef64de1a55f4b9900c5ea5b6aae2c74906081ef7203"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.328402 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7" event={"ID":"011ba434-d4cf-479a-8732-d8621edc4fcf","Type":"ContainerStarted","Data":"c7be8738f3eed48c502a7cd8c11ccf69028d727fec2c632cc59635dbcfa60640"} Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.328418 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.330198 4745 patch_prober.go:28] interesting pod/downloads-7954f5f757-b2dbs container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.330230 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-b2dbs" podUID="c377f24c-360e-4c65-ad5d-6423e735d7a4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: 
connect: connection refused" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.347250 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-hq629" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.349085 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-2nbvg" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.364600 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-cvr94" podStartSLOduration=9.36458361 podStartE2EDuration="9.36458361s" podCreationTimestamp="2025-12-08 00:09:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:57.356430241 +0000 UTC m=+152.785636541" watchObservedRunningTime="2025-12-08 00:09:57.36458361 +0000 UTC m=+152.793789910" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.369693 4745 patch_prober.go:28] interesting pod/router-default-5444994796-qtsfq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 00:09:57 crc kubenswrapper[4745]: [-]has-synced failed: reason withheld Dec 08 00:09:57 crc kubenswrapper[4745]: [+]process-running ok Dec 08 00:09:57 crc kubenswrapper[4745]: healthz check failed Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.369975 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qtsfq" podUID="a15e5593-aef3-43e1-894e-51a109c501a7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.383761 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:57 crc kubenswrapper[4745]: E1208 00:09:57.387327 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:57.887300506 +0000 UTC m=+153.316506806 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.426209 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" podStartSLOduration=132.426188856 podStartE2EDuration="2m12.426188856s" podCreationTimestamp="2025-12-08 00:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:57.406669329 +0000 UTC m=+152.835875629" watchObservedRunningTime="2025-12-08 00:09:57.426188856 +0000 UTC m=+152.855395156" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.487462 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:57 crc kubenswrapper[4745]: E1208 00:09:57.487659 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:57.987632058 +0000 UTC m=+153.416838358 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.488408 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:57 crc kubenswrapper[4745]: E1208 00:09:57.489095 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:57.989085352 +0000 UTC m=+153.418291652 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.582435 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" podStartSLOduration=133.58241857 podStartE2EDuration="2m13.58241857s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:57.581192312 +0000 UTC m=+153.010398612" watchObservedRunningTime="2025-12-08 00:09:57.58241857 +0000 UTC m=+153.011624870" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.584158 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7" podStartSLOduration=132.584150533 podStartE2EDuration="2m12.584150533s" podCreationTimestamp="2025-12-08 00:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:57.554555007 +0000 UTC m=+152.983761297" watchObservedRunningTime="2025-12-08 00:09:57.584150533 +0000 UTC m=+153.013356833" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.589139 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:57 crc kubenswrapper[4745]: E1208 00:09:57.589504 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:58.089484236 +0000 UTC m=+153.518690536 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.613534 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8kc2b" podStartSLOduration=133.613517222 podStartE2EDuration="2m13.613517222s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:57.612231422 +0000 UTC m=+153.041437722" watchObservedRunningTime="2025-12-08 00:09:57.613517222 +0000 UTC m=+153.042723522" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.648115 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-nsh9z" podStartSLOduration=132.648101321 podStartE2EDuration="2m12.648101321s" podCreationTimestamp="2025-12-08 00:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:57.645555483 +0000 UTC m=+153.074761773" watchObservedRunningTime="2025-12-08 00:09:57.648101321 +0000 UTC m=+153.077307621" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.685567 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-7h4jt" podStartSLOduration=132.685553367 podStartE2EDuration="2m12.685553367s" podCreationTimestamp="2025-12-08 00:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:57.68432785 +0000 UTC m=+153.113534150" watchObservedRunningTime="2025-12-08 00:09:57.685553367 +0000 UTC m=+153.114759657" Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.690230 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:57 crc kubenswrapper[4745]: E1208 00:09:57.690530 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:58.190519569 +0000 UTC m=+153.619725869 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.792351 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:57 crc kubenswrapper[4745]: E1208 00:09:57.792695 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:58.292680867 +0000 UTC m=+153.721887167 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.893803 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:57 crc kubenswrapper[4745]: E1208 00:09:57.894426 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:58.394405892 +0000 UTC m=+153.823612262 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.994859 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:57 crc kubenswrapper[4745]: E1208 00:09:57.995079 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:58.495053833 +0000 UTC m=+153.924260133 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:57 crc kubenswrapper[4745]: I1208 00:09:57.995500 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:57 crc kubenswrapper[4745]: E1208 00:09:57.995773 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:58.495765945 +0000 UTC m=+153.924972245 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.096680 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:58 crc kubenswrapper[4745]: E1208 00:09:58.096895 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:58.59685939 +0000 UTC m=+154.026065700 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.097152 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:58 crc kubenswrapper[4745]: E1208 00:09:58.097486 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:58.597473039 +0000 UTC m=+154.026679339 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.126399 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-k66hs"] Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.127746 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.129658 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.138313 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k66hs"] Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.146129 4745 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.198376 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:58 crc kubenswrapper[4745]: E1208 00:09:58.198685 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-08 00:09:58.698669957 +0000 UTC m=+154.127876257 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.299530 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd084f85-b44a-4016-9fbd-5f051c4e9a53-catalog-content\") pod \"certified-operators-k66hs\" (UID: \"dd084f85-b44a-4016-9fbd-5f051c4e9a53\") " pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.299597 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.299675 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqsvw\" (UniqueName: \"kubernetes.io/projected/dd084f85-b44a-4016-9fbd-5f051c4e9a53-kube-api-access-dqsvw\") pod \"certified-operators-k66hs\" (UID: \"dd084f85-b44a-4016-9fbd-5f051c4e9a53\") " pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.299709 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd084f85-b44a-4016-9fbd-5f051c4e9a53-utilities\") pod \"certified-operators-k66hs\" (UID: 
\"dd084f85-b44a-4016-9fbd-5f051c4e9a53\") " pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:09:58 crc kubenswrapper[4745]: E1208 00:09:58.300079 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-08 00:09:58.800065472 +0000 UTC m=+154.229271772 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l6mg7" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.313832 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fkwkz"] Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.314725 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.316245 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.327298 4745 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-08T00:09:58.146157869Z","Handler":null,"Name":""} Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.330853 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fkwkz"] Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.333646 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp" event={"ID":"cb60ee68-795e-44a7-896f-cc0ab8963417","Type":"ContainerStarted","Data":"b63ab3e18718992385f8f13c0216cc99be60f75a9497dfdbb7f23ab7b7456013"} Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.333771 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp" event={"ID":"cb60ee68-795e-44a7-896f-cc0ab8963417","Type":"ContainerStarted","Data":"64353296dab2fc7ddc258b4d8870c2269dabe0da870ff47555c2e9130b5ed549"} Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.335057 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-sb889" event={"ID":"37a32702-19b8-4fe3-8eda-52e5a39db569","Type":"ContainerStarted","Data":"42f76fef467440659ac7d64360c28fed0f6144a2e97102565adec8c76d0c73dd"} Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.336091 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wszxv" event={"ID":"39b1b9ce-45b4-45ff-b6d5-e4deedf30e19","Type":"ContainerStarted","Data":"cfaf84668d70b1aef00f947d6424390f8fa32fad21d819b0d698a4b8b9017b1d"} Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.337633 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7" 
event={"ID":"011ba434-d4cf-479a-8732-d8621edc4fcf","Type":"ContainerStarted","Data":"d6849d56ec192ee0bd995b63dbdf1daeb05a20f009f13be5d932bcd939be50f3"} Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.339955 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qzc8t" event={"ID":"4710a169-2ce7-48ed-b70f-d637966bbcfd","Type":"ContainerStarted","Data":"38a8cc02bb45df335783b96dba7a3abbce34d351c025b27ef03556c8716ac328"} Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.341139 4745 patch_prober.go:28] interesting pod/downloads-7954f5f757-b2dbs container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.341173 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-b2dbs" podUID="c377f24c-360e-4c65-ad5d-6423e735d7a4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.353701 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hzb8b" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.372418 4745 patch_prober.go:28] interesting pod/router-default-5444994796-qtsfq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 00:09:58 crc kubenswrapper[4745]: [-]has-synced failed: reason withheld Dec 08 00:09:58 crc kubenswrapper[4745]: [+]process-running ok Dec 08 00:09:58 crc kubenswrapper[4745]: healthz check failed Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.372772 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qtsfq" podUID="a15e5593-aef3-43e1-894e-51a109c501a7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.386903 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qvcwp" podStartSLOduration=134.386863339 podStartE2EDuration="2m14.386863339s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:58.385016162 +0000 UTC m=+153.814222462" watchObservedRunningTime="2025-12-08 00:09:58.386863339 +0000 UTC m=+153.816069639" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.389917 4745 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.389976 4745 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.402584 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.403319 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqsvw\" (UniqueName: \"kubernetes.io/projected/dd084f85-b44a-4016-9fbd-5f051c4e9a53-kube-api-access-dqsvw\") pod \"certified-operators-k66hs\" (UID: \"dd084f85-b44a-4016-9fbd-5f051c4e9a53\") " pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.403416 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd084f85-b44a-4016-9fbd-5f051c4e9a53-utilities\") pod \"certified-operators-k66hs\" (UID: \"dd084f85-b44a-4016-9fbd-5f051c4e9a53\") " pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.403497 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd084f85-b44a-4016-9fbd-5f051c4e9a53-catalog-content\") pod \"certified-operators-k66hs\" (UID: \"dd084f85-b44a-4016-9fbd-5f051c4e9a53\") " pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.404205 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd084f85-b44a-4016-9fbd-5f051c4e9a53-catalog-content\") pod \"certified-operators-k66hs\" (UID: \"dd084f85-b44a-4016-9fbd-5f051c4e9a53\") " pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.404957 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd084f85-b44a-4016-9fbd-5f051c4e9a53-utilities\") pod \"certified-operators-k66hs\" (UID: \"dd084f85-b44a-4016-9fbd-5f051c4e9a53\") " pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.425305 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-qzc8t" podStartSLOduration=134.425284195 podStartE2EDuration="2m14.425284195s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:58.420245221 +0000 UTC m=+153.849451531" watchObservedRunningTime="2025-12-08 00:09:58.425284195 +0000 UTC m=+153.854490495" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.431237 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqsvw\" (UniqueName: \"kubernetes.io/projected/dd084f85-b44a-4016-9fbd-5f051c4e9a53-kube-api-access-dqsvw\") pod \"certified-operators-k66hs\" (UID: \"dd084f85-b44a-4016-9fbd-5f051c4e9a53\") " pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.442736 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.489071 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wszxv" podStartSLOduration=133.489057867 podStartE2EDuration="2m13.489057867s" podCreationTimestamp="2025-12-08 00:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:09:58.48720711 +0000 UTC m=+153.916413410" watchObservedRunningTime="2025-12-08 00:09:58.489057867 +0000 UTC m=+153.918264167" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.504992 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6xq7\" (UniqueName: \"kubernetes.io/projected/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-kube-api-access-q6xq7\") pod \"community-operators-fkwkz\" (UID: \"76ddbfcb-3447-4c68-a36e-fc310ab2f75b\") " pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.505512 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-utilities\") pod \"community-operators-fkwkz\" (UID: \"76ddbfcb-3447-4c68-a36e-fc310ab2f75b\") " pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.505567 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-catalog-content\") pod \"community-operators-fkwkz\" (UID: \"76ddbfcb-3447-4c68-a36e-fc310ab2f75b\") " pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.509210 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gdrzd"] Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.510683 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.534338 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gdrzd"] Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.569006 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.606648 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be800637-8860-4249-b97f-94c5c87aa8ed-utilities\") pod \"certified-operators-gdrzd\" (UID: \"be800637-8860-4249-b97f-94c5c87aa8ed\") " pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.606710 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9926l\" (UniqueName: \"kubernetes.io/projected/be800637-8860-4249-b97f-94c5c87aa8ed-kube-api-access-9926l\") pod \"certified-operators-gdrzd\" (UID: \"be800637-8860-4249-b97f-94c5c87aa8ed\") " pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.606744 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6xq7\" (UniqueName: \"kubernetes.io/projected/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-kube-api-access-q6xq7\") pod \"community-operators-fkwkz\" (UID: \"76ddbfcb-3447-4c68-a36e-fc310ab2f75b\") " pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.606789 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.606831 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-utilities\") pod \"community-operators-fkwkz\" (UID: \"76ddbfcb-3447-4c68-a36e-fc310ab2f75b\") " pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.606848 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be800637-8860-4249-b97f-94c5c87aa8ed-catalog-content\") pod \"certified-operators-gdrzd\" (UID: \"be800637-8860-4249-b97f-94c5c87aa8ed\") " pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.606864 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-catalog-content\") pod \"community-operators-fkwkz\" (UID: \"76ddbfcb-3447-4c68-a36e-fc310ab2f75b\") " pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.617207 4745 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.617263 4745 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.709197 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9926l\" (UniqueName: \"kubernetes.io/projected/be800637-8860-4249-b97f-94c5c87aa8ed-kube-api-access-9926l\") pod \"certified-operators-gdrzd\" (UID: \"be800637-8860-4249-b97f-94c5c87aa8ed\") " pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.709312 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be800637-8860-4249-b97f-94c5c87aa8ed-catalog-content\") pod \"certified-operators-gdrzd\" (UID: \"be800637-8860-4249-b97f-94c5c87aa8ed\") " pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.709353 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be800637-8860-4249-b97f-94c5c87aa8ed-utilities\") pod \"certified-operators-gdrzd\" (UID: \"be800637-8860-4249-b97f-94c5c87aa8ed\") " pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.709838 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be800637-8860-4249-b97f-94c5c87aa8ed-utilities\") pod \"certified-operators-gdrzd\" (UID: \"be800637-8860-4249-b97f-94c5c87aa8ed\") " pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.710644 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be800637-8860-4249-b97f-94c5c87aa8ed-catalog-content\") pod \"certified-operators-gdrzd\" (UID: \"be800637-8860-4249-b97f-94c5c87aa8ed\") " pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.712471 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4r7rr"] Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.713711 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.724042 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4r7rr"] Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.728368 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-utilities\") pod \"community-operators-fkwkz\" (UID: \"76ddbfcb-3447-4c68-a36e-fc310ab2f75b\") " pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.728379 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-catalog-content\") pod \"community-operators-fkwkz\" (UID: \"76ddbfcb-3447-4c68-a36e-fc310ab2f75b\") " pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.734425 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6xq7\" (UniqueName: \"kubernetes.io/projected/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-kube-api-access-q6xq7\") pod \"community-operators-fkwkz\" (UID: \"76ddbfcb-3447-4c68-a36e-fc310ab2f75b\") " pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.743909 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9926l\" (UniqueName: \"kubernetes.io/projected/be800637-8860-4249-b97f-94c5c87aa8ed-kube-api-access-9926l\") pod \"certified-operators-gdrzd\" (UID: \"be800637-8860-4249-b97f-94c5c87aa8ed\") " pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.777523 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l6mg7\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.812471 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5657276-6ab2-4a60-aa98-b08b3828c1b8-catalog-content\") pod \"community-operators-4r7rr\" (UID: \"b5657276-6ab2-4a60-aa98-b08b3828c1b8\") " pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.812546 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5657276-6ab2-4a60-aa98-b08b3828c1b8-utilities\") pod \"community-operators-4r7rr\" (UID: \"b5657276-6ab2-4a60-aa98-b08b3828c1b8\") " pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.812645 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sl24\" (UniqueName: \"kubernetes.io/projected/b5657276-6ab2-4a60-aa98-b08b3828c1b8-kube-api-access-4sl24\") pod \"community-operators-4r7rr\" (UID: \"b5657276-6ab2-4a60-aa98-b08b3828c1b8\") " pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:09:58 crc kubenswrapper[4745]: 
I1208 00:09:58.854307 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.886180 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.913580 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sl24\" (UniqueName: \"kubernetes.io/projected/b5657276-6ab2-4a60-aa98-b08b3828c1b8-kube-api-access-4sl24\") pod \"community-operators-4r7rr\" (UID: \"b5657276-6ab2-4a60-aa98-b08b3828c1b8\") " pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.913636 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5657276-6ab2-4a60-aa98-b08b3828c1b8-catalog-content\") pod \"community-operators-4r7rr\" (UID: \"b5657276-6ab2-4a60-aa98-b08b3828c1b8\") " pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.913666 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5657276-6ab2-4a60-aa98-b08b3828c1b8-utilities\") pod \"community-operators-4r7rr\" (UID: \"b5657276-6ab2-4a60-aa98-b08b3828c1b8\") " pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.914166 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5657276-6ab2-4a60-aa98-b08b3828c1b8-utilities\") pod \"community-operators-4r7rr\" (UID: \"b5657276-6ab2-4a60-aa98-b08b3828c1b8\") " pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.914686 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5657276-6ab2-4a60-aa98-b08b3828c1b8-catalog-content\") pod \"community-operators-4r7rr\" (UID: \"b5657276-6ab2-4a60-aa98-b08b3828c1b8\") " pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.917109 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.928265 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.937013 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sl24\" (UniqueName: \"kubernetes.io/projected/b5657276-6ab2-4a60-aa98-b08b3828c1b8-kube-api-access-4sl24\") pod \"community-operators-4r7rr\" (UID: \"b5657276-6ab2-4a60-aa98-b08b3828c1b8\") " pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:09:58 crc kubenswrapper[4745]: I1208 00:09:58.954600 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k66hs"] Dec 08 00:09:58 crc kubenswrapper[4745]: W1208 00:09:58.990143 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd084f85_b44a_4016_9fbd_5f051c4e9a53.slice/crio-3a633854a59b7bee0fa14ea423e0439b9d65cb16a9ae01b283768b1695a04feb WatchSource:0}: Error finding container 3a633854a59b7bee0fa14ea423e0439b9d65cb16a9ae01b283768b1695a04feb: Status 404 returned error can't find the container with id 3a633854a59b7bee0fa14ea423e0439b9d65cb16a9ae01b283768b1695a04feb Dec 08 00:09:59 crc kubenswrapper[4745]: I1208 00:09:59.079630 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:09:59 crc kubenswrapper[4745]: I1208 00:09:59.125358 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-r5nn4" Dec 08 00:09:59 crc kubenswrapper[4745]: I1208 00:09:59.346390 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k66hs" event={"ID":"dd084f85-b44a-4016-9fbd-5f051c4e9a53","Type":"ContainerStarted","Data":"3a633854a59b7bee0fa14ea423e0439b9d65cb16a9ae01b283768b1695a04feb"} Dec 08 00:09:59 crc kubenswrapper[4745]: I1208 00:09:59.379198 4745 patch_prober.go:28] interesting pod/router-default-5444994796-qtsfq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 00:09:59 crc kubenswrapper[4745]: [-]has-synced failed: reason withheld Dec 08 00:09:59 crc kubenswrapper[4745]: [+]process-running ok Dec 08 00:09:59 crc kubenswrapper[4745]: healthz check failed Dec 08 00:09:59 crc kubenswrapper[4745]: I1208 00:09:59.379265 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qtsfq" podUID="a15e5593-aef3-43e1-894e-51a109c501a7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 00:09:59 crc kubenswrapper[4745]: I1208 00:09:59.380840 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-sb889" event={"ID":"37a32702-19b8-4fe3-8eda-52e5a39db569","Type":"ContainerStarted","Data":"cf30c252136b76264fa143c2a167dac9913b0fae75b517d39c48722edd074304"} Dec 08 00:09:59 crc kubenswrapper[4745]: I1208 00:09:59.406281 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l6mg7"] Dec 08 00:09:59 crc kubenswrapper[4745]: I1208 00:09:59.477606 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gdrzd"] Dec 08 00:09:59 crc kubenswrapper[4745]: I1208 00:09:59.615004 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-marketplace/community-operators-fkwkz"] Dec 08 00:09:59 crc kubenswrapper[4745]: I1208 00:09:59.615493 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4r7rr"] Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.112435 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-twwmd"] Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.113742 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.115768 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.129306 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-twwmd"] Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.157853 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-utilities\") pod \"redhat-marketplace-twwmd\" (UID: \"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0\") " pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.157910 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hk8gj\" (UniqueName: \"kubernetes.io/projected/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-kube-api-access-hk8gj\") pod \"redhat-marketplace-twwmd\" (UID: \"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0\") " pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.157999 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-catalog-content\") pod \"redhat-marketplace-twwmd\" (UID: \"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0\") " pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.259057 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-catalog-content\") pod \"redhat-marketplace-twwmd\" (UID: \"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0\") " pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.259170 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-utilities\") pod \"redhat-marketplace-twwmd\" (UID: \"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0\") " pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.259213 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hk8gj\" (UniqueName: \"kubernetes.io/projected/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-kube-api-access-hk8gj\") pod \"redhat-marketplace-twwmd\" (UID: \"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0\") " pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.260127 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-catalog-content\") pod \"redhat-marketplace-twwmd\" (UID: \"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0\") " pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.260264 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-utilities\") pod \"redhat-marketplace-twwmd\" (UID: \"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0\") " pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.291607 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hk8gj\" (UniqueName: \"kubernetes.io/projected/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-kube-api-access-hk8gj\") pod \"redhat-marketplace-twwmd\" (UID: \"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0\") " pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.375664 4745 patch_prober.go:28] interesting pod/router-default-5444994796-qtsfq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 00:10:00 crc kubenswrapper[4745]: [-]has-synced failed: reason withheld Dec 08 00:10:00 crc kubenswrapper[4745]: [+]process-running ok Dec 08 00:10:00 crc kubenswrapper[4745]: healthz check failed Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.375719 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qtsfq" podUID="a15e5593-aef3-43e1-894e-51a109c501a7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.387133 4745 generic.go:334] "Generic (PLEG): container finished" podID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" containerID="11c53739ca58873a1ad1e093dabac6bbadc64ea79bf3ac71bdb1e72aa2f2fb77" exitCode=0 Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.387202 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k66hs" event={"ID":"dd084f85-b44a-4016-9fbd-5f051c4e9a53","Type":"ContainerDied","Data":"11c53739ca58873a1ad1e093dabac6bbadc64ea79bf3ac71bdb1e72aa2f2fb77"} Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.388814 4745 generic.go:334] "Generic (PLEG): container finished" podID="be800637-8860-4249-b97f-94c5c87aa8ed" containerID="06164353cf425782db830a07bea431f8f43793f954bd30c4754fd2ec2f1bdd22" exitCode=0 Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.388913 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gdrzd" event={"ID":"be800637-8860-4249-b97f-94c5c87aa8ed","Type":"ContainerDied","Data":"06164353cf425782db830a07bea431f8f43793f954bd30c4754fd2ec2f1bdd22"} Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.388978 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gdrzd" event={"ID":"be800637-8860-4249-b97f-94c5c87aa8ed","Type":"ContainerStarted","Data":"71696ac0429054eb7ba4b9c0b8b119018e4a9ea4c65b4d3f5d8cd4621d48c4d6"} Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.389843 4745 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.391378 4745 generic.go:334] 
"Generic (PLEG): container finished" podID="76ddbfcb-3447-4c68-a36e-fc310ab2f75b" containerID="538af921cea7869e325ef7f220c9c5dcee99792e80c7ae061e99dd6365bc6882" exitCode=0 Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.391443 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fkwkz" event={"ID":"76ddbfcb-3447-4c68-a36e-fc310ab2f75b","Type":"ContainerDied","Data":"538af921cea7869e325ef7f220c9c5dcee99792e80c7ae061e99dd6365bc6882"} Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.391617 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fkwkz" event={"ID":"76ddbfcb-3447-4c68-a36e-fc310ab2f75b","Type":"ContainerStarted","Data":"12a80f2b21942907ef94adb6472bb6b527be87680f1356753d9785620d2212d9"} Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.397264 4745 generic.go:334] "Generic (PLEG): container finished" podID="b5657276-6ab2-4a60-aa98-b08b3828c1b8" containerID="6886b3bcf75c1f264f92922a4fade79e6d34b21f7425b7f90324229fe048c178" exitCode=0 Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.397348 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4r7rr" event={"ID":"b5657276-6ab2-4a60-aa98-b08b3828c1b8","Type":"ContainerDied","Data":"6886b3bcf75c1f264f92922a4fade79e6d34b21f7425b7f90324229fe048c178"} Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.397386 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4r7rr" event={"ID":"b5657276-6ab2-4a60-aa98-b08b3828c1b8","Type":"ContainerStarted","Data":"825bd23548f68c37a5467494c80290646d75e58e37d3ab29390eb5fc6b892287"} Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.406488 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-sb889" event={"ID":"37a32702-19b8-4fe3-8eda-52e5a39db569","Type":"ContainerStarted","Data":"0ae49fe6e154d0c1edb3fb608933e741239addebd7a808968de27992560e2c59"} Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.408835 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" event={"ID":"1c9c3543-96ff-4a6f-9499-95bd43aa7368","Type":"ContainerStarted","Data":"82f83256dd32d6fb3d1078e9213ffc05a6a52d25a4c48b639fede29f66d087a2"} Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.408883 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" event={"ID":"1c9c3543-96ff-4a6f-9499-95bd43aa7368","Type":"ContainerStarted","Data":"ad8677310aabdbadb468a5c190aa1005302a2e8b321b5f161441e0ef2d702c5e"} Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.409099 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.427354 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.479683 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-sb889" podStartSLOduration=11.479661992 podStartE2EDuration="11.479661992s" podCreationTimestamp="2025-12-08 00:09:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:10:00.475260798 +0000 UTC m=+155.904467098" watchObservedRunningTime="2025-12-08 00:10:00.479661992 +0000 UTC m=+155.908868302" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.517041 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pqrlr"] Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.520771 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.522690 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqrlr"] Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.530270 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" podStartSLOduration=136.530244351 podStartE2EDuration="2m16.530244351s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:10:00.521536975 +0000 UTC m=+155.950743275" watchObservedRunningTime="2025-12-08 00:10:00.530244351 +0000 UTC m=+155.959450651" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.563708 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40aebf93-f654-463e-b4cd-c5f13850fee6-utilities\") pod \"redhat-marketplace-pqrlr\" (UID: \"40aebf93-f654-463e-b4cd-c5f13850fee6\") " pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.563768 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzhx5\" (UniqueName: \"kubernetes.io/projected/40aebf93-f654-463e-b4cd-c5f13850fee6-kube-api-access-tzhx5\") pod \"redhat-marketplace-pqrlr\" (UID: \"40aebf93-f654-463e-b4cd-c5f13850fee6\") " pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.563830 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40aebf93-f654-463e-b4cd-c5f13850fee6-catalog-content\") pod \"redhat-marketplace-pqrlr\" (UID: \"40aebf93-f654-463e-b4cd-c5f13850fee6\") " pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.664821 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzhx5\" (UniqueName: \"kubernetes.io/projected/40aebf93-f654-463e-b4cd-c5f13850fee6-kube-api-access-tzhx5\") pod \"redhat-marketplace-pqrlr\" (UID: \"40aebf93-f654-463e-b4cd-c5f13850fee6\") " pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.664890 4745 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40aebf93-f654-463e-b4cd-c5f13850fee6-catalog-content\") pod \"redhat-marketplace-pqrlr\" (UID: \"40aebf93-f654-463e-b4cd-c5f13850fee6\") " pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.665030 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40aebf93-f654-463e-b4cd-c5f13850fee6-utilities\") pod \"redhat-marketplace-pqrlr\" (UID: \"40aebf93-f654-463e-b4cd-c5f13850fee6\") " pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.665621 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40aebf93-f654-463e-b4cd-c5f13850fee6-utilities\") pod \"redhat-marketplace-pqrlr\" (UID: \"40aebf93-f654-463e-b4cd-c5f13850fee6\") " pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.665914 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40aebf93-f654-463e-b4cd-c5f13850fee6-catalog-content\") pod \"redhat-marketplace-pqrlr\" (UID: \"40aebf93-f654-463e-b4cd-c5f13850fee6\") " pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.686258 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzhx5\" (UniqueName: \"kubernetes.io/projected/40aebf93-f654-463e-b4cd-c5f13850fee6-kube-api-access-tzhx5\") pod \"redhat-marketplace-pqrlr\" (UID: \"40aebf93-f654-463e-b4cd-c5f13850fee6\") " pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.718615 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-twwmd"] Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.821170 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.821211 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.827400 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:10:00 crc kubenswrapper[4745]: I1208 00:10:00.834292 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.028655 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqrlr"] Dec 08 00:10:01 crc kubenswrapper[4745]: W1208 00:10:01.041414 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40aebf93_f654_463e_b4cd_c5f13850fee6.slice/crio-236770113af3beaf9bdc4d440233df2458f5c6607584694139fab8e83b383285 WatchSource:0}: Error finding container 236770113af3beaf9bdc4d440233df2458f5c6607584694139fab8e83b383285: Status 404 returned error can't find the container with id 236770113af3beaf9bdc4d440233df2458f5c6607584694139fab8e83b383285 Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.370448 4745 patch_prober.go:28] interesting pod/router-default-5444994796-qtsfq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 00:10:01 crc kubenswrapper[4745]: [-]has-synced failed: reason withheld Dec 08 00:10:01 crc kubenswrapper[4745]: [+]process-running ok Dec 08 00:10:01 crc kubenswrapper[4745]: healthz check failed Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.370558 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qtsfq" podUID="a15e5593-aef3-43e1-894e-51a109c501a7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.510643 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-l6kk8"] Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.512639 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.515396 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.527304 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l6kk8"] Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.578174 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-catalog-content\") pod \"redhat-operators-l6kk8\" (UID: \"e9bdca4d-dada-48b2-b9aa-43dd3801eb93\") " pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.578265 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-utilities\") pod \"redhat-operators-l6kk8\" (UID: \"e9bdca4d-dada-48b2-b9aa-43dd3801eb93\") " pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.578523 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75bj7\" (UniqueName: \"kubernetes.io/projected/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-kube-api-access-75bj7\") pod \"redhat-operators-l6kk8\" (UID: \"e9bdca4d-dada-48b2-b9aa-43dd3801eb93\") " pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.604368 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.605164 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.607236 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.608331 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.610552 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.680240 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75bj7\" (UniqueName: \"kubernetes.io/projected/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-kube-api-access-75bj7\") pod \"redhat-operators-l6kk8\" (UID: \"e9bdca4d-dada-48b2-b9aa-43dd3801eb93\") " pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.680549 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-catalog-content\") pod \"redhat-operators-l6kk8\" (UID: \"e9bdca4d-dada-48b2-b9aa-43dd3801eb93\") " pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.680675 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-utilities\") pod \"redhat-operators-l6kk8\" (UID: \"e9bdca4d-dada-48b2-b9aa-43dd3801eb93\") " pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.680256 4745 patch_prober.go:28] interesting pod/downloads-7954f5f757-b2dbs container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.680830 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-b2dbs" podUID="c377f24c-360e-4c65-ad5d-6423e735d7a4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.680293 4745 patch_prober.go:28] interesting pod/downloads-7954f5f757-b2dbs container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.680970 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-b2dbs" podUID="c377f24c-360e-4c65-ad5d-6423e735d7a4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.681019 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-catalog-content\") pod \"redhat-operators-l6kk8\" (UID: \"e9bdca4d-dada-48b2-b9aa-43dd3801eb93\") " 
pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.681116 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-utilities\") pod \"redhat-operators-l6kk8\" (UID: \"e9bdca4d-dada-48b2-b9aa-43dd3801eb93\") " pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.704724 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75bj7\" (UniqueName: \"kubernetes.io/projected/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-kube-api-access-75bj7\") pod \"redhat-operators-l6kk8\" (UID: \"e9bdca4d-dada-48b2-b9aa-43dd3801eb93\") " pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.782337 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c106ed83-7956-4fae-9913-3223aaedfcdf-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c106ed83-7956-4fae-9913-3223aaedfcdf\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.782474 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c106ed83-7956-4fae-9913-3223aaedfcdf-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c106ed83-7956-4fae-9913-3223aaedfcdf\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.878916 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.883970 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c106ed83-7956-4fae-9913-3223aaedfcdf-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c106ed83-7956-4fae-9913-3223aaedfcdf\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.884130 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c106ed83-7956-4fae-9913-3223aaedfcdf-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c106ed83-7956-4fae-9913-3223aaedfcdf\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.884228 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c106ed83-7956-4fae-9913-3223aaedfcdf-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c106ed83-7956-4fae-9913-3223aaedfcdf\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.900904 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c106ed83-7956-4fae-9913-3223aaedfcdf-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c106ed83-7956-4fae-9913-3223aaedfcdf\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.902959 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.903007 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.904219 4745 patch_prober.go:28] interesting pod/console-f9d7485db-8cg7l container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.904295 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-8cg7l" podUID="a704627f-7539-4aec-ba1a-344a957ab7bf" containerName="console" probeResult="failure" output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.910694 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pc9ns"] Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.911834 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.922619 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.945278 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pc9ns"] Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.981698 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.983954 4745 generic.go:334] "Generic (PLEG): container finished" podID="289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" containerID="0a9d9e98ef256d82bba214b27758236c2abce043235d6525796fa241712c394b" exitCode=0 Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.984632 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-twwmd" event={"ID":"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0","Type":"ContainerDied","Data":"0a9d9e98ef256d82bba214b27758236c2abce043235d6525796fa241712c394b"} Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.984665 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-twwmd" event={"ID":"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0","Type":"ContainerStarted","Data":"09dd50cf9914e2abe80924985aab0597a35880dcbea7bc61a0fd8491c0334e2f"} Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.985678 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qc892\" (UniqueName: \"kubernetes.io/projected/463b12ee-0c67-452b-8a10-7330ec0224aa-kube-api-access-qc892\") pod \"redhat-operators-pc9ns\" (UID: \"463b12ee-0c67-452b-8a10-7330ec0224aa\") " pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.985762 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/463b12ee-0c67-452b-8a10-7330ec0224aa-catalog-content\") pod \"redhat-operators-pc9ns\" (UID: \"463b12ee-0c67-452b-8a10-7330ec0224aa\") " 
pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.985789 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/463b12ee-0c67-452b-8a10-7330ec0224aa-utilities\") pod \"redhat-operators-pc9ns\" (UID: \"463b12ee-0c67-452b-8a10-7330ec0224aa\") " pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.989323 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqrlr" event={"ID":"40aebf93-f654-463e-b4cd-c5f13850fee6","Type":"ContainerStarted","Data":"236770113af3beaf9bdc4d440233df2458f5c6607584694139fab8e83b383285"} Dec 08 00:10:01 crc kubenswrapper[4745]: I1208 00:10:01.997112 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-lx4dj" Dec 08 00:10:02 crc kubenswrapper[4745]: I1208 00:10:02.098480 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/463b12ee-0c67-452b-8a10-7330ec0224aa-catalog-content\") pod \"redhat-operators-pc9ns\" (UID: \"463b12ee-0c67-452b-8a10-7330ec0224aa\") " pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:10:02 crc kubenswrapper[4745]: I1208 00:10:02.098557 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/463b12ee-0c67-452b-8a10-7330ec0224aa-utilities\") pod \"redhat-operators-pc9ns\" (UID: \"463b12ee-0c67-452b-8a10-7330ec0224aa\") " pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:10:02 crc kubenswrapper[4745]: I1208 00:10:02.098715 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qc892\" (UniqueName: \"kubernetes.io/projected/463b12ee-0c67-452b-8a10-7330ec0224aa-kube-api-access-qc892\") pod \"redhat-operators-pc9ns\" (UID: \"463b12ee-0c67-452b-8a10-7330ec0224aa\") " pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:10:02 crc kubenswrapper[4745]: I1208 00:10:02.102967 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/463b12ee-0c67-452b-8a10-7330ec0224aa-catalog-content\") pod \"redhat-operators-pc9ns\" (UID: \"463b12ee-0c67-452b-8a10-7330ec0224aa\") " pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:10:02 crc kubenswrapper[4745]: I1208 00:10:02.103257 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/463b12ee-0c67-452b-8a10-7330ec0224aa-utilities\") pod \"redhat-operators-pc9ns\" (UID: \"463b12ee-0c67-452b-8a10-7330ec0224aa\") " pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:10:02 crc kubenswrapper[4745]: I1208 00:10:02.169710 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qc892\" (UniqueName: \"kubernetes.io/projected/463b12ee-0c67-452b-8a10-7330ec0224aa-kube-api-access-qc892\") pod \"redhat-operators-pc9ns\" (UID: \"463b12ee-0c67-452b-8a10-7330ec0224aa\") " pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:10:02 crc kubenswrapper[4745]: I1208 00:10:02.193537 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:10:02 crc kubenswrapper[4745]: I1208 00:10:02.309971 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l6kk8"] Dec 08 00:10:02 crc kubenswrapper[4745]: I1208 00:10:02.367690 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:10:02 crc kubenswrapper[4745]: I1208 00:10:02.389270 4745 patch_prober.go:28] interesting pod/router-default-5444994796-qtsfq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 08 00:10:02 crc kubenswrapper[4745]: [-]has-synced failed: reason withheld Dec 08 00:10:02 crc kubenswrapper[4745]: [+]process-running ok Dec 08 00:10:02 crc kubenswrapper[4745]: healthz check failed Dec 08 00:10:02 crc kubenswrapper[4745]: I1208 00:10:02.389557 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qtsfq" podUID="a15e5593-aef3-43e1-894e-51a109c501a7" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 08 00:10:02 crc kubenswrapper[4745]: I1208 00:10:02.600479 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 08 00:10:02 crc kubenswrapper[4745]: I1208 00:10:02.838646 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pc9ns"] Dec 08 00:10:02 crc kubenswrapper[4745]: W1208 00:10:02.843190 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod463b12ee_0c67_452b_8a10_7330ec0224aa.slice/crio-d987150e0cda186d8ab4ad3b101a501f755335ae8a8905cdba9c5a088d94ac64 WatchSource:0}: Error finding container d987150e0cda186d8ab4ad3b101a501f755335ae8a8905cdba9c5a088d94ac64: Status 404 returned error can't find the container with id d987150e0cda186d8ab4ad3b101a501f755335ae8a8905cdba9c5a088d94ac64 Dec 08 00:10:02 crc kubenswrapper[4745]: I1208 00:10:02.999128 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c106ed83-7956-4fae-9913-3223aaedfcdf","Type":"ContainerStarted","Data":"96276ed800c57b635aeeb59c88a3951c4828fe6093b0b91d34455ae15eab2dea"} Dec 08 00:10:03 crc kubenswrapper[4745]: I1208 00:10:03.008452 4745 generic.go:334] "Generic (PLEG): container finished" podID="40aebf93-f654-463e-b4cd-c5f13850fee6" containerID="80e346490c1dc99e8ec5741f04016abb79531acc3797f9b9272fedb0ca09409f" exitCode=0 Dec 08 00:10:03 crc kubenswrapper[4745]: I1208 00:10:03.008513 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqrlr" event={"ID":"40aebf93-f654-463e-b4cd-c5f13850fee6","Type":"ContainerDied","Data":"80e346490c1dc99e8ec5741f04016abb79531acc3797f9b9272fedb0ca09409f"} Dec 08 00:10:03 crc kubenswrapper[4745]: I1208 00:10:03.014676 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pc9ns" event={"ID":"463b12ee-0c67-452b-8a10-7330ec0224aa","Type":"ContainerStarted","Data":"d987150e0cda186d8ab4ad3b101a501f755335ae8a8905cdba9c5a088d94ac64"} Dec 08 00:10:03 crc kubenswrapper[4745]: I1208 00:10:03.018419 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6kk8" 
event={"ID":"e9bdca4d-dada-48b2-b9aa-43dd3801eb93","Type":"ContainerStarted","Data":"745825b08f28e38d8f78a18237ae44cb6716d9f47621c74d1d24a9c242df90d6"} Dec 08 00:10:03 crc kubenswrapper[4745]: I1208 00:10:03.018444 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6kk8" event={"ID":"e9bdca4d-dada-48b2-b9aa-43dd3801eb93","Type":"ContainerStarted","Data":"b01cf916fcc3a3fbe808bd9b188289aea14c7339eb4f17fa5674560ec24f3684"} Dec 08 00:10:03 crc kubenswrapper[4745]: I1208 00:10:03.024414 4745 generic.go:334] "Generic (PLEG): container finished" podID="fa019532-0002-464b-9965-2804d83126a8" containerID="716a91fdc1b5339dc0f7d375e7a74a5360c128b3d167029c7654c426c6fa9168" exitCode=0 Dec 08 00:10:03 crc kubenswrapper[4745]: I1208 00:10:03.024967 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" event={"ID":"fa019532-0002-464b-9965-2804d83126a8","Type":"ContainerDied","Data":"716a91fdc1b5339dc0f7d375e7a74a5360c128b3d167029c7654c426c6fa9168"} Dec 08 00:10:03 crc kubenswrapper[4745]: I1208 00:10:03.369517 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:10:03 crc kubenswrapper[4745]: I1208 00:10:03.377018 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-qtsfq" Dec 08 00:10:04 crc kubenswrapper[4745]: I1208 00:10:04.032730 4745 generic.go:334] "Generic (PLEG): container finished" podID="c106ed83-7956-4fae-9913-3223aaedfcdf" containerID="07e112aec1ba53da091857fefcd6ad5aa1f88f88ce51caaf7d5e088b60c76a88" exitCode=0 Dec 08 00:10:04 crc kubenswrapper[4745]: I1208 00:10:04.032822 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c106ed83-7956-4fae-9913-3223aaedfcdf","Type":"ContainerDied","Data":"07e112aec1ba53da091857fefcd6ad5aa1f88f88ce51caaf7d5e088b60c76a88"} Dec 08 00:10:04 crc kubenswrapper[4745]: I1208 00:10:04.034618 4745 generic.go:334] "Generic (PLEG): container finished" podID="463b12ee-0c67-452b-8a10-7330ec0224aa" containerID="8cebaf1ac2e91a3f6654151e0e90732331658c0abc6c27e5c0650d55e1616c66" exitCode=0 Dec 08 00:10:04 crc kubenswrapper[4745]: I1208 00:10:04.034681 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pc9ns" event={"ID":"463b12ee-0c67-452b-8a10-7330ec0224aa","Type":"ContainerDied","Data":"8cebaf1ac2e91a3f6654151e0e90732331658c0abc6c27e5c0650d55e1616c66"} Dec 08 00:10:04 crc kubenswrapper[4745]: I1208 00:10:04.037813 4745 generic.go:334] "Generic (PLEG): container finished" podID="e9bdca4d-dada-48b2-b9aa-43dd3801eb93" containerID="745825b08f28e38d8f78a18237ae44cb6716d9f47621c74d1d24a9c242df90d6" exitCode=0 Dec 08 00:10:04 crc kubenswrapper[4745]: I1208 00:10:04.037878 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6kk8" event={"ID":"e9bdca4d-dada-48b2-b9aa-43dd3801eb93","Type":"ContainerDied","Data":"745825b08f28e38d8f78a18237ae44cb6716d9f47621c74d1d24a9c242df90d6"} Dec 08 00:10:04 crc kubenswrapper[4745]: I1208 00:10:04.383663 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" Dec 08 00:10:04 crc kubenswrapper[4745]: I1208 00:10:04.555401 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dc9qk\" (UniqueName: \"kubernetes.io/projected/fa019532-0002-464b-9965-2804d83126a8-kube-api-access-dc9qk\") pod \"fa019532-0002-464b-9965-2804d83126a8\" (UID: \"fa019532-0002-464b-9965-2804d83126a8\") " Dec 08 00:10:04 crc kubenswrapper[4745]: I1208 00:10:04.555745 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa019532-0002-464b-9965-2804d83126a8-secret-volume\") pod \"fa019532-0002-464b-9965-2804d83126a8\" (UID: \"fa019532-0002-464b-9965-2804d83126a8\") " Dec 08 00:10:04 crc kubenswrapper[4745]: I1208 00:10:04.556530 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa019532-0002-464b-9965-2804d83126a8-config-volume" (OuterVolumeSpecName: "config-volume") pod "fa019532-0002-464b-9965-2804d83126a8" (UID: "fa019532-0002-464b-9965-2804d83126a8"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:10:04 crc kubenswrapper[4745]: I1208 00:10:04.555782 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa019532-0002-464b-9965-2804d83126a8-config-volume\") pod \"fa019532-0002-464b-9965-2804d83126a8\" (UID: \"fa019532-0002-464b-9965-2804d83126a8\") " Dec 08 00:10:04 crc kubenswrapper[4745]: I1208 00:10:04.557181 4745 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fa019532-0002-464b-9965-2804d83126a8-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 00:10:04 crc kubenswrapper[4745]: I1208 00:10:04.596262 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa019532-0002-464b-9965-2804d83126a8-kube-api-access-dc9qk" (OuterVolumeSpecName: "kube-api-access-dc9qk") pod "fa019532-0002-464b-9965-2804d83126a8" (UID: "fa019532-0002-464b-9965-2804d83126a8"). InnerVolumeSpecName "kube-api-access-dc9qk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:10:04 crc kubenswrapper[4745]: I1208 00:10:04.607260 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa019532-0002-464b-9965-2804d83126a8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fa019532-0002-464b-9965-2804d83126a8" (UID: "fa019532-0002-464b-9965-2804d83126a8"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:10:04 crc kubenswrapper[4745]: I1208 00:10:04.658417 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dc9qk\" (UniqueName: \"kubernetes.io/projected/fa019532-0002-464b-9965-2804d83126a8-kube-api-access-dc9qk\") on node \"crc\" DevicePath \"\"" Dec 08 00:10:04 crc kubenswrapper[4745]: I1208 00:10:04.658461 4745 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fa019532-0002-464b-9965-2804d83126a8-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.053493 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.053975 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29419200-dnd2g" event={"ID":"fa019532-0002-464b-9965-2804d83126a8","Type":"ContainerDied","Data":"fe6e5fbf5b57b5341b465fdfde74f534ae05494a8efe6d70871cc4a066fdd5a7"} Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.053998 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe6e5fbf5b57b5341b465fdfde74f534ae05494a8efe6d70871cc4a066fdd5a7" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.421944 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.467515 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 08 00:10:05 crc kubenswrapper[4745]: E1208 00:10:05.467740 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa019532-0002-464b-9965-2804d83126a8" containerName="collect-profiles" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.467756 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa019532-0002-464b-9965-2804d83126a8" containerName="collect-profiles" Dec 08 00:10:05 crc kubenswrapper[4745]: E1208 00:10:05.467771 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c106ed83-7956-4fae-9913-3223aaedfcdf" containerName="pruner" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.467779 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="c106ed83-7956-4fae-9913-3223aaedfcdf" containerName="pruner" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.467874 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="c106ed83-7956-4fae-9913-3223aaedfcdf" containerName="pruner" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.467884 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa019532-0002-464b-9965-2804d83126a8" containerName="collect-profiles" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.468295 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.471768 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.475292 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.485012 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.585253 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c106ed83-7956-4fae-9913-3223aaedfcdf-kubelet-dir\") pod \"c106ed83-7956-4fae-9913-3223aaedfcdf\" (UID: \"c106ed83-7956-4fae-9913-3223aaedfcdf\") " Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.585366 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c106ed83-7956-4fae-9913-3223aaedfcdf-kube-api-access\") pod \"c106ed83-7956-4fae-9913-3223aaedfcdf\" (UID: \"c106ed83-7956-4fae-9913-3223aaedfcdf\") " Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.585567 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e87f08d1-0dea-4210-aa90-01b76a8bd273-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"e87f08d1-0dea-4210-aa90-01b76a8bd273\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.585622 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e87f08d1-0dea-4210-aa90-01b76a8bd273-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"e87f08d1-0dea-4210-aa90-01b76a8bd273\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.585716 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c106ed83-7956-4fae-9913-3223aaedfcdf-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c106ed83-7956-4fae-9913-3223aaedfcdf" (UID: "c106ed83-7956-4fae-9913-3223aaedfcdf"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.593096 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c106ed83-7956-4fae-9913-3223aaedfcdf-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c106ed83-7956-4fae-9913-3223aaedfcdf" (UID: "c106ed83-7956-4fae-9913-3223aaedfcdf"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.686960 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e87f08d1-0dea-4210-aa90-01b76a8bd273-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"e87f08d1-0dea-4210-aa90-01b76a8bd273\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.687060 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e87f08d1-0dea-4210-aa90-01b76a8bd273-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"e87f08d1-0dea-4210-aa90-01b76a8bd273\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.687153 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c106ed83-7956-4fae-9913-3223aaedfcdf-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.687168 4745 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c106ed83-7956-4fae-9913-3223aaedfcdf-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.687218 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e87f08d1-0dea-4210-aa90-01b76a8bd273-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"e87f08d1-0dea-4210-aa90-01b76a8bd273\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.702139 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e87f08d1-0dea-4210-aa90-01b76a8bd273-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"e87f08d1-0dea-4210-aa90-01b76a8bd273\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 00:10:05 crc kubenswrapper[4745]: I1208 00:10:05.824914 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 00:10:06 crc kubenswrapper[4745]: I1208 00:10:06.076256 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c106ed83-7956-4fae-9913-3223aaedfcdf","Type":"ContainerDied","Data":"96276ed800c57b635aeeb59c88a3951c4828fe6093b0b91d34455ae15eab2dea"} Dec 08 00:10:06 crc kubenswrapper[4745]: I1208 00:10:06.076290 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="96276ed800c57b635aeeb59c88a3951c4828fe6093b0b91d34455ae15eab2dea" Dec 08 00:10:06 crc kubenswrapper[4745]: I1208 00:10:06.076326 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 08 00:10:06 crc kubenswrapper[4745]: I1208 00:10:06.099671 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 08 00:10:06 crc kubenswrapper[4745]: W1208 00:10:06.109177 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pode87f08d1_0dea_4210_aa90_01b76a8bd273.slice/crio-8c766f750ef5f1b6851258266e51c48743c3f614b893a5bdc396eb2efa264094 WatchSource:0}: Error finding container 8c766f750ef5f1b6851258266e51c48743c3f614b893a5bdc396eb2efa264094: Status 404 returned error can't find the container with id 8c766f750ef5f1b6851258266e51c48743c3f614b893a5bdc396eb2efa264094 Dec 08 00:10:07 crc kubenswrapper[4745]: I1208 00:10:07.092937 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e87f08d1-0dea-4210-aa90-01b76a8bd273","Type":"ContainerStarted","Data":"8c766f750ef5f1b6851258266e51c48743c3f614b893a5bdc396eb2efa264094"} Dec 08 00:10:07 crc kubenswrapper[4745]: I1208 00:10:07.114686 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-cvr94" Dec 08 00:10:08 crc kubenswrapper[4745]: I1208 00:10:08.102495 4745 generic.go:334] "Generic (PLEG): container finished" podID="e87f08d1-0dea-4210-aa90-01b76a8bd273" containerID="314e7db19cf81f8055bef930e07f43862be472f52589e47fdc70080fe1ba0266" exitCode=0 Dec 08 00:10:08 crc kubenswrapper[4745]: I1208 00:10:08.102547 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e87f08d1-0dea-4210-aa90-01b76a8bd273","Type":"ContainerDied","Data":"314e7db19cf81f8055bef930e07f43862be472f52589e47fdc70080fe1ba0266"} Dec 08 00:10:08 crc kubenswrapper[4745]: I1208 00:10:08.366788 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs\") pod \"network-metrics-daemon-zpkz9\" (UID: \"c402d875-2477-4bda-872a-da631b5b5ff7\") " pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:10:08 crc kubenswrapper[4745]: I1208 00:10:08.383679 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c402d875-2477-4bda-872a-da631b5b5ff7-metrics-certs\") pod \"network-metrics-daemon-zpkz9\" (UID: \"c402d875-2477-4bda-872a-da631b5b5ff7\") " pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:10:08 crc kubenswrapper[4745]: I1208 00:10:08.508451 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-zpkz9" Dec 08 00:10:11 crc kubenswrapper[4745]: I1208 00:10:11.685317 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-b2dbs" Dec 08 00:10:12 crc kubenswrapper[4745]: I1208 00:10:12.012166 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:10:12 crc kubenswrapper[4745]: I1208 00:10:12.016091 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-8cg7l" Dec 08 00:10:18 crc kubenswrapper[4745]: I1208 00:10:18.008281 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 00:10:18 crc kubenswrapper[4745]: I1208 00:10:18.044014 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e87f08d1-0dea-4210-aa90-01b76a8bd273-kubelet-dir\") pod \"e87f08d1-0dea-4210-aa90-01b76a8bd273\" (UID: \"e87f08d1-0dea-4210-aa90-01b76a8bd273\") " Dec 08 00:10:18 crc kubenswrapper[4745]: I1208 00:10:18.044085 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e87f08d1-0dea-4210-aa90-01b76a8bd273-kube-api-access\") pod \"e87f08d1-0dea-4210-aa90-01b76a8bd273\" (UID: \"e87f08d1-0dea-4210-aa90-01b76a8bd273\") " Dec 08 00:10:18 crc kubenswrapper[4745]: I1208 00:10:18.044172 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e87f08d1-0dea-4210-aa90-01b76a8bd273-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e87f08d1-0dea-4210-aa90-01b76a8bd273" (UID: "e87f08d1-0dea-4210-aa90-01b76a8bd273"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:10:18 crc kubenswrapper[4745]: I1208 00:10:18.044710 4745 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e87f08d1-0dea-4210-aa90-01b76a8bd273-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 08 00:10:18 crc kubenswrapper[4745]: I1208 00:10:18.051161 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e87f08d1-0dea-4210-aa90-01b76a8bd273-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e87f08d1-0dea-4210-aa90-01b76a8bd273" (UID: "e87f08d1-0dea-4210-aa90-01b76a8bd273"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:10:18 crc kubenswrapper[4745]: I1208 00:10:18.146217 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e87f08d1-0dea-4210-aa90-01b76a8bd273-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 00:10:18 crc kubenswrapper[4745]: I1208 00:10:18.184629 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e87f08d1-0dea-4210-aa90-01b76a8bd273","Type":"ContainerDied","Data":"8c766f750ef5f1b6851258266e51c48743c3f614b893a5bdc396eb2efa264094"} Dec 08 00:10:18 crc kubenswrapper[4745]: I1208 00:10:18.184675 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c766f750ef5f1b6851258266e51c48743c3f614b893a5bdc396eb2efa264094" Dec 08 00:10:18 crc kubenswrapper[4745]: I1208 00:10:18.184719 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 08 00:10:18 crc kubenswrapper[4745]: I1208 00:10:18.891002 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:10:22 crc kubenswrapper[4745]: I1208 00:10:22.465304 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:10:22 crc kubenswrapper[4745]: I1208 00:10:22.465688 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:10:32 crc kubenswrapper[4745]: I1208 00:10:32.434852 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dfqd7" Dec 08 00:10:32 crc kubenswrapper[4745]: I1208 00:10:32.451464 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 08 00:10:33 crc kubenswrapper[4745]: I1208 00:10:33.272587 4745 generic.go:334] "Generic (PLEG): container finished" podID="712511e1-14ba-4465-8050-02b8d5916f46" containerID="2f4c9d3abb3b5c74d4e5dcbdbe041cd056985632cfa3a044d2ac8d9d64f21b09" exitCode=0 Dec 08 00:10:33 crc kubenswrapper[4745]: I1208 00:10:33.272671 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29419200-2mqnz" event={"ID":"712511e1-14ba-4465-8050-02b8d5916f46","Type":"ContainerDied","Data":"2f4c9d3abb3b5c74d4e5dcbdbe041cd056985632cfa3a044d2ac8d9d64f21b09"} Dec 08 00:10:37 crc kubenswrapper[4745]: I1208 00:10:37.078012 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 08 00:10:37 crc kubenswrapper[4745]: E1208 00:10:37.080012 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e87f08d1-0dea-4210-aa90-01b76a8bd273" containerName="pruner" Dec 08 00:10:37 crc kubenswrapper[4745]: I1208 00:10:37.080220 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="e87f08d1-0dea-4210-aa90-01b76a8bd273" containerName="pruner" Dec 08 00:10:37 crc kubenswrapper[4745]: I1208 00:10:37.080565 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="e87f08d1-0dea-4210-aa90-01b76a8bd273" containerName="pruner" Dec 08 00:10:37 crc kubenswrapper[4745]: I1208 00:10:37.081420 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 00:10:37 crc kubenswrapper[4745]: I1208 00:10:37.091272 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 08 00:10:37 crc kubenswrapper[4745]: I1208 00:10:37.091646 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 08 00:10:37 crc kubenswrapper[4745]: I1208 00:10:37.104352 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 08 00:10:37 crc kubenswrapper[4745]: I1208 00:10:37.113761 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c1f4efd-7af4-451c-bc57-150df9d0fc1d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1c1f4efd-7af4-451c-bc57-150df9d0fc1d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 00:10:37 crc kubenswrapper[4745]: I1208 00:10:37.113854 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c1f4efd-7af4-451c-bc57-150df9d0fc1d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1c1f4efd-7af4-451c-bc57-150df9d0fc1d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 00:10:37 crc kubenswrapper[4745]: I1208 00:10:37.214476 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c1f4efd-7af4-451c-bc57-150df9d0fc1d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1c1f4efd-7af4-451c-bc57-150df9d0fc1d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 00:10:37 crc kubenswrapper[4745]: I1208 00:10:37.214618 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c1f4efd-7af4-451c-bc57-150df9d0fc1d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1c1f4efd-7af4-451c-bc57-150df9d0fc1d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 00:10:37 crc kubenswrapper[4745]: I1208 00:10:37.214675 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c1f4efd-7af4-451c-bc57-150df9d0fc1d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1c1f4efd-7af4-451c-bc57-150df9d0fc1d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 00:10:37 crc kubenswrapper[4745]: I1208 00:10:37.248284 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c1f4efd-7af4-451c-bc57-150df9d0fc1d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1c1f4efd-7af4-451c-bc57-150df9d0fc1d\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 00:10:37 crc kubenswrapper[4745]: I1208 00:10:37.449495 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 00:10:42 crc kubenswrapper[4745]: E1208 00:10:42.856445 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 08 00:10:42 crc kubenswrapper[4745]: E1208 00:10:42.857316 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qc892,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-pc9ns_openshift-marketplace(463b12ee-0c67-452b-8a10-7330ec0224aa): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 00:10:42 crc kubenswrapper[4745]: E1208 00:10:42.858490 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-pc9ns" podUID="463b12ee-0c67-452b-8a10-7330ec0224aa" Dec 08 00:10:42 crc kubenswrapper[4745]: I1208 00:10:42.867397 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 08 00:10:42 crc kubenswrapper[4745]: I1208 00:10:42.868521 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 08 00:10:42 crc kubenswrapper[4745]: I1208 00:10:42.904835 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 08 00:10:43 crc kubenswrapper[4745]: I1208 00:10:43.002365 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5ae5f185-66f5-42c0-8a72-355923a94e40-kubelet-dir\") pod \"installer-9-crc\" (UID: \"5ae5f185-66f5-42c0-8a72-355923a94e40\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 00:10:43 crc kubenswrapper[4745]: I1208 00:10:43.002516 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5ae5f185-66f5-42c0-8a72-355923a94e40-kube-api-access\") pod \"installer-9-crc\" (UID: \"5ae5f185-66f5-42c0-8a72-355923a94e40\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 00:10:43 crc kubenswrapper[4745]: I1208 00:10:43.002600 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5ae5f185-66f5-42c0-8a72-355923a94e40-var-lock\") pod \"installer-9-crc\" (UID: \"5ae5f185-66f5-42c0-8a72-355923a94e40\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 00:10:43 crc kubenswrapper[4745]: I1208 00:10:43.103808 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5ae5f185-66f5-42c0-8a72-355923a94e40-var-lock\") pod \"installer-9-crc\" (UID: \"5ae5f185-66f5-42c0-8a72-355923a94e40\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 00:10:43 crc kubenswrapper[4745]: I1208 00:10:43.103909 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5ae5f185-66f5-42c0-8a72-355923a94e40-kubelet-dir\") pod \"installer-9-crc\" (UID: \"5ae5f185-66f5-42c0-8a72-355923a94e40\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 00:10:43 crc kubenswrapper[4745]: I1208 00:10:43.103919 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5ae5f185-66f5-42c0-8a72-355923a94e40-var-lock\") pod \"installer-9-crc\" (UID: \"5ae5f185-66f5-42c0-8a72-355923a94e40\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 00:10:43 crc kubenswrapper[4745]: I1208 00:10:43.103982 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5ae5f185-66f5-42c0-8a72-355923a94e40-kube-api-access\") pod \"installer-9-crc\" (UID: \"5ae5f185-66f5-42c0-8a72-355923a94e40\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 00:10:43 crc kubenswrapper[4745]: I1208 00:10:43.104093 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5ae5f185-66f5-42c0-8a72-355923a94e40-kubelet-dir\") pod \"installer-9-crc\" (UID: \"5ae5f185-66f5-42c0-8a72-355923a94e40\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 00:10:43 crc kubenswrapper[4745]: I1208 00:10:43.126629 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5ae5f185-66f5-42c0-8a72-355923a94e40-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"5ae5f185-66f5-42c0-8a72-355923a94e40\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 08 00:10:43 crc kubenswrapper[4745]: I1208 00:10:43.201021 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 08 00:10:44 crc kubenswrapper[4745]: E1208 00:10:44.770059 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-pc9ns" podUID="463b12ee-0c67-452b-8a10-7330ec0224aa" Dec 08 00:10:44 crc kubenswrapper[4745]: E1208 00:10:44.951508 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 08 00:10:44 crc kubenswrapper[4745]: E1208 00:10:44.952498 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4sl24,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-4r7rr_openshift-marketplace(b5657276-6ab2-4a60-aa98-b08b3828c1b8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 00:10:44 crc kubenswrapper[4745]: E1208 00:10:44.953780 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-4r7rr" podUID="b5657276-6ab2-4a60-aa98-b08b3828c1b8" Dec 08 00:10:48 crc kubenswrapper[4745]: E1208 00:10:48.089200 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-4r7rr" podUID="b5657276-6ab2-4a60-aa98-b08b3828c1b8" Dec 08 00:10:48 crc kubenswrapper[4745]: E1208 00:10:48.199208 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 08 00:10:48 crc kubenswrapper[4745]: E1208 00:10:48.200084 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9926l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-gdrzd_openshift-marketplace(be800637-8860-4249-b97f-94c5c87aa8ed): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 00:10:48 crc kubenswrapper[4745]: E1208 00:10:48.201737 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-gdrzd" podUID="be800637-8860-4249-b97f-94c5c87aa8ed" Dec 08 00:10:48 crc kubenswrapper[4745]: E1208 00:10:48.267027 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 08 00:10:48 crc kubenswrapper[4745]: E1208 00:10:48.267232 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-75bj7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-l6kk8_openshift-marketplace(e9bdca4d-dada-48b2-b9aa-43dd3801eb93): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 00:10:48 crc kubenswrapper[4745]: E1208 00:10:48.269193 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-l6kk8" podUID="e9bdca4d-dada-48b2-b9aa-43dd3801eb93" Dec 08 00:10:49 crc kubenswrapper[4745]: E1208 00:10:49.224247 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-gdrzd" podUID="be800637-8860-4249-b97f-94c5c87aa8ed" Dec 08 00:10:49 crc kubenswrapper[4745]: E1208 00:10:49.224282 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-l6kk8" podUID="e9bdca4d-dada-48b2-b9aa-43dd3801eb93" Dec 08 00:10:49 crc kubenswrapper[4745]: I1208 00:10:49.282474 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29419200-2mqnz" Dec 08 00:10:49 crc kubenswrapper[4745]: E1208 00:10:49.315378 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 08 00:10:49 crc kubenswrapper[4745]: E1208 00:10:49.315524 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hk8gj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-twwmd_openshift-marketplace(289a5ef6-f5f7-4225-9790-5cbc5c1bcee0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 00:10:49 crc kubenswrapper[4745]: E1208 00:10:49.317384 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-twwmd" podUID="289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" Dec 08 00:10:49 crc kubenswrapper[4745]: E1208 00:10:49.319766 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 08 00:10:49 crc kubenswrapper[4745]: E1208 00:10:49.319892 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tzhx5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-pqrlr_openshift-marketplace(40aebf93-f654-463e-b4cd-c5f13850fee6): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 00:10:49 crc kubenswrapper[4745]: E1208 00:10:49.321098 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-pqrlr" podUID="40aebf93-f654-463e-b4cd-c5f13850fee6" Dec 08 00:10:49 crc kubenswrapper[4745]: E1208 00:10:49.330840 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 08 00:10:49 crc kubenswrapper[4745]: E1208 00:10:49.330983 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q6xq7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-fkwkz_openshift-marketplace(76ddbfcb-3447-4c68-a36e-fc310ab2f75b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 00:10:49 crc kubenswrapper[4745]: E1208 00:10:49.332121 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-fkwkz" podUID="76ddbfcb-3447-4c68-a36e-fc310ab2f75b" Dec 08 00:10:49 crc kubenswrapper[4745]: I1208 00:10:49.363442 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29419200-2mqnz" event={"ID":"712511e1-14ba-4465-8050-02b8d5916f46","Type":"ContainerDied","Data":"07aaaa25b08c710fd9e0e521c72b22f457f757f44b18d01a3e68329adf072e30"} Dec 08 00:10:49 crc kubenswrapper[4745]: I1208 00:10:49.363477 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="07aaaa25b08c710fd9e0e521c72b22f457f757f44b18d01a3e68329adf072e30" Dec 08 00:10:49 crc kubenswrapper[4745]: I1208 00:10:49.363530 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29419200-2mqnz" Dec 08 00:10:49 crc kubenswrapper[4745]: E1208 00:10:49.364678 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 08 00:10:49 crc kubenswrapper[4745]: E1208 00:10:49.365166 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dqsvw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-k66hs_openshift-marketplace(dd084f85-b44a-4016-9fbd-5f051c4e9a53): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 00:10:49 crc kubenswrapper[4745]: E1208 00:10:49.366948 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-k66hs" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" Dec 08 00:10:49 crc kubenswrapper[4745]: E1208 00:10:49.367780 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-twwmd" podUID="289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" Dec 08 00:10:49 crc kubenswrapper[4745]: E1208 00:10:49.367970 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-fkwkz" podUID="76ddbfcb-3447-4c68-a36e-fc310ab2f75b" Dec 08 00:10:49 crc kubenswrapper[4745]: I1208 
00:10:49.386694 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/712511e1-14ba-4465-8050-02b8d5916f46-serviceca\") pod \"712511e1-14ba-4465-8050-02b8d5916f46\" (UID: \"712511e1-14ba-4465-8050-02b8d5916f46\") " Dec 08 00:10:49 crc kubenswrapper[4745]: I1208 00:10:49.386761 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhmnz\" (UniqueName: \"kubernetes.io/projected/712511e1-14ba-4465-8050-02b8d5916f46-kube-api-access-dhmnz\") pod \"712511e1-14ba-4465-8050-02b8d5916f46\" (UID: \"712511e1-14ba-4465-8050-02b8d5916f46\") " Dec 08 00:10:49 crc kubenswrapper[4745]: I1208 00:10:49.393229 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/712511e1-14ba-4465-8050-02b8d5916f46-serviceca" (OuterVolumeSpecName: "serviceca") pod "712511e1-14ba-4465-8050-02b8d5916f46" (UID: "712511e1-14ba-4465-8050-02b8d5916f46"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:10:49 crc kubenswrapper[4745]: I1208 00:10:49.404528 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/712511e1-14ba-4465-8050-02b8d5916f46-kube-api-access-dhmnz" (OuterVolumeSpecName: "kube-api-access-dhmnz") pod "712511e1-14ba-4465-8050-02b8d5916f46" (UID: "712511e1-14ba-4465-8050-02b8d5916f46"). InnerVolumeSpecName "kube-api-access-dhmnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:10:49 crc kubenswrapper[4745]: I1208 00:10:49.488911 4745 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/712511e1-14ba-4465-8050-02b8d5916f46-serviceca\") on node \"crc\" DevicePath \"\"" Dec 08 00:10:49 crc kubenswrapper[4745]: I1208 00:10:49.488959 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhmnz\" (UniqueName: \"kubernetes.io/projected/712511e1-14ba-4465-8050-02b8d5916f46-kube-api-access-dhmnz\") on node \"crc\" DevicePath \"\"" Dec 08 00:10:49 crc kubenswrapper[4745]: I1208 00:10:49.699366 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 08 00:10:49 crc kubenswrapper[4745]: I1208 00:10:49.701751 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-zpkz9"] Dec 08 00:10:49 crc kubenswrapper[4745]: I1208 00:10:49.706978 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 08 00:10:49 crc kubenswrapper[4745]: W1208 00:10:49.708675 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod1c1f4efd_7af4_451c_bc57_150df9d0fc1d.slice/crio-1eb8aab7faab448bc49dcce0d01b7af83d8c038a5ae9a335dcef5d8e440e6d9f WatchSource:0}: Error finding container 1eb8aab7faab448bc49dcce0d01b7af83d8c038a5ae9a335dcef5d8e440e6d9f: Status 404 returned error can't find the container with id 1eb8aab7faab448bc49dcce0d01b7af83d8c038a5ae9a335dcef5d8e440e6d9f Dec 08 00:10:49 crc kubenswrapper[4745]: W1208 00:10:49.713121 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod5ae5f185_66f5_42c0_8a72_355923a94e40.slice/crio-33b68909e488c81b8165738ea2f769e2f0b81726018bf1a763bb70527353dfbf WatchSource:0}: Error finding container 33b68909e488c81b8165738ea2f769e2f0b81726018bf1a763bb70527353dfbf: Status 404 returned error can't find the container with id 
33b68909e488c81b8165738ea2f769e2f0b81726018bf1a763bb70527353dfbf Dec 08 00:10:50 crc kubenswrapper[4745]: I1208 00:10:50.370989 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1c1f4efd-7af4-451c-bc57-150df9d0fc1d","Type":"ContainerStarted","Data":"1eb8aab7faab448bc49dcce0d01b7af83d8c038a5ae9a335dcef5d8e440e6d9f"} Dec 08 00:10:50 crc kubenswrapper[4745]: I1208 00:10:50.372877 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" event={"ID":"c402d875-2477-4bda-872a-da631b5b5ff7","Type":"ContainerStarted","Data":"7f79e633375ed0a47a5e2640866f5b9c54e20c1b6bc46c96a9dd6645037120cd"} Dec 08 00:10:50 crc kubenswrapper[4745]: I1208 00:10:50.375041 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5ae5f185-66f5-42c0-8a72-355923a94e40","Type":"ContainerStarted","Data":"33b68909e488c81b8165738ea2f769e2f0b81726018bf1a763bb70527353dfbf"} Dec 08 00:10:50 crc kubenswrapper[4745]: E1208 00:10:50.378028 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-k66hs" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" Dec 08 00:10:51 crc kubenswrapper[4745]: I1208 00:10:51.384569 4745 generic.go:334] "Generic (PLEG): container finished" podID="1c1f4efd-7af4-451c-bc57-150df9d0fc1d" containerID="41d514a293970dad753ec8351ddceb08822e79d418498d0ef9e25a3a465386a4" exitCode=0 Dec 08 00:10:51 crc kubenswrapper[4745]: I1208 00:10:51.384610 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1c1f4efd-7af4-451c-bc57-150df9d0fc1d","Type":"ContainerDied","Data":"41d514a293970dad753ec8351ddceb08822e79d418498d0ef9e25a3a465386a4"} Dec 08 00:10:51 crc kubenswrapper[4745]: I1208 00:10:51.388439 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" event={"ID":"c402d875-2477-4bda-872a-da631b5b5ff7","Type":"ContainerStarted","Data":"1b666ba7d99bc544cea7151faeb7af441dfd727455e9ee19100467da195c33c4"} Dec 08 00:10:51 crc kubenswrapper[4745]: I1208 00:10:51.388507 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-zpkz9" event={"ID":"c402d875-2477-4bda-872a-da631b5b5ff7","Type":"ContainerStarted","Data":"3d70860488857247e46c1be566b4b49038a645003cd3f474822614e82c091ae6"} Dec 08 00:10:51 crc kubenswrapper[4745]: I1208 00:10:51.390854 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5ae5f185-66f5-42c0-8a72-355923a94e40","Type":"ContainerStarted","Data":"5003433d2932953d97ccc443fa343a361d33cc147457474750c6a3504da20961"} Dec 08 00:10:51 crc kubenswrapper[4745]: I1208 00:10:51.421825 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-zpkz9" podStartSLOduration=187.421802335 podStartE2EDuration="3m7.421802335s" podCreationTimestamp="2025-12-08 00:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:10:51.418263087 +0000 UTC m=+206.847469417" watchObservedRunningTime="2025-12-08 00:10:51.421802335 +0000 UTC m=+206.851008675" Dec 08 00:10:51 
crc kubenswrapper[4745]: I1208 00:10:51.440418 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=9.440394232 podStartE2EDuration="9.440394232s" podCreationTimestamp="2025-12-08 00:10:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:10:51.437430653 +0000 UTC m=+206.866636983" watchObservedRunningTime="2025-12-08 00:10:51.440394232 +0000 UTC m=+206.869600542" Dec 08 00:10:52 crc kubenswrapper[4745]: I1208 00:10:52.461284 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:10:52 crc kubenswrapper[4745]: I1208 00:10:52.461760 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:10:52 crc kubenswrapper[4745]: I1208 00:10:52.461825 4745 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:10:52 crc kubenswrapper[4745]: I1208 00:10:52.462482 4745 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e"} pod="openshift-machine-config-operator/machine-config-daemon-6czdv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 00:10:52 crc kubenswrapper[4745]: I1208 00:10:52.462622 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" containerID="cri-o://1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e" gracePeriod=600 Dec 08 00:10:52 crc kubenswrapper[4745]: I1208 00:10:52.685593 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 00:10:52 crc kubenswrapper[4745]: I1208 00:10:52.840293 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c1f4efd-7af4-451c-bc57-150df9d0fc1d-kubelet-dir\") pod \"1c1f4efd-7af4-451c-bc57-150df9d0fc1d\" (UID: \"1c1f4efd-7af4-451c-bc57-150df9d0fc1d\") " Dec 08 00:10:52 crc kubenswrapper[4745]: I1208 00:10:52.840500 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c1f4efd-7af4-451c-bc57-150df9d0fc1d-kube-api-access\") pod \"1c1f4efd-7af4-451c-bc57-150df9d0fc1d\" (UID: \"1c1f4efd-7af4-451c-bc57-150df9d0fc1d\") " Dec 08 00:10:52 crc kubenswrapper[4745]: I1208 00:10:52.840549 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c1f4efd-7af4-451c-bc57-150df9d0fc1d-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1c1f4efd-7af4-451c-bc57-150df9d0fc1d" (UID: "1c1f4efd-7af4-451c-bc57-150df9d0fc1d"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:10:52 crc kubenswrapper[4745]: I1208 00:10:52.840909 4745 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1c1f4efd-7af4-451c-bc57-150df9d0fc1d-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 08 00:10:52 crc kubenswrapper[4745]: I1208 00:10:52.850103 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c1f4efd-7af4-451c-bc57-150df9d0fc1d-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1c1f4efd-7af4-451c-bc57-150df9d0fc1d" (UID: "1c1f4efd-7af4-451c-bc57-150df9d0fc1d"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:10:52 crc kubenswrapper[4745]: I1208 00:10:52.942527 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1c1f4efd-7af4-451c-bc57-150df9d0fc1d-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 00:10:53 crc kubenswrapper[4745]: I1208 00:10:53.406635 4745 generic.go:334] "Generic (PLEG): container finished" podID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerID="1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e" exitCode=0 Dec 08 00:10:53 crc kubenswrapper[4745]: I1208 00:10:53.406760 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerDied","Data":"1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e"} Dec 08 00:10:53 crc kubenswrapper[4745]: I1208 00:10:53.407292 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerStarted","Data":"f8980071a87957e128867c229c023064db213f93114f668042757c6e5b0c70b0"} Dec 08 00:10:53 crc kubenswrapper[4745]: I1208 00:10:53.409822 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1c1f4efd-7af4-451c-bc57-150df9d0fc1d","Type":"ContainerDied","Data":"1eb8aab7faab448bc49dcce0d01b7af83d8c038a5ae9a335dcef5d8e440e6d9f"} Dec 08 00:10:53 crc kubenswrapper[4745]: I1208 00:10:53.409873 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1eb8aab7faab448bc49dcce0d01b7af83d8c038a5ae9a335dcef5d8e440e6d9f" Dec 08 00:10:53 crc kubenswrapper[4745]: I1208 00:10:53.409893 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 08 00:10:59 crc kubenswrapper[4745]: I1208 00:10:59.440682 4745 generic.go:334] "Generic (PLEG): container finished" podID="463b12ee-0c67-452b-8a10-7330ec0224aa" containerID="b27bf1abde19c14426388e19151d58892a3cfac6dbecbde2ad62ab44fdc749b5" exitCode=0 Dec 08 00:10:59 crc kubenswrapper[4745]: I1208 00:10:59.440770 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pc9ns" event={"ID":"463b12ee-0c67-452b-8a10-7330ec0224aa","Type":"ContainerDied","Data":"b27bf1abde19c14426388e19151d58892a3cfac6dbecbde2ad62ab44fdc749b5"} Dec 08 00:11:02 crc kubenswrapper[4745]: I1208 00:11:02.457300 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pc9ns" event={"ID":"463b12ee-0c67-452b-8a10-7330ec0224aa","Type":"ContainerStarted","Data":"0b74821ac02a44e6e6793a43552c6734a1bad9e406e2aa19542dcbc59804a1a5"} Dec 08 00:11:02 crc kubenswrapper[4745]: I1208 00:11:02.479070 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pc9ns" podStartSLOduration=3.314013571 podStartE2EDuration="1m1.479054036s" podCreationTimestamp="2025-12-08 00:10:01 +0000 UTC" firstStartedPulling="2025-12-08 00:10:04.036344685 +0000 UTC m=+159.465550985" lastFinishedPulling="2025-12-08 00:11:02.20138514 +0000 UTC m=+217.630591450" observedRunningTime="2025-12-08 00:11:02.476590075 +0000 UTC m=+217.905796385" watchObservedRunningTime="2025-12-08 00:11:02.479054036 +0000 UTC m=+217.908260336" Dec 08 00:11:03 crc kubenswrapper[4745]: I1208 00:11:03.475555 4745 generic.go:334] "Generic (PLEG): container finished" podID="be800637-8860-4249-b97f-94c5c87aa8ed" containerID="b5d035e98b91919199916759d2874998312bcd5a1d5bc8852de92d9b7ccbc66f" exitCode=0 Dec 08 00:11:03 crc kubenswrapper[4745]: I1208 00:11:03.476067 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gdrzd" event={"ID":"be800637-8860-4249-b97f-94c5c87aa8ed","Type":"ContainerDied","Data":"b5d035e98b91919199916759d2874998312bcd5a1d5bc8852de92d9b7ccbc66f"} Dec 08 00:11:03 crc kubenswrapper[4745]: I1208 00:11:03.480404 4745 generic.go:334] "Generic (PLEG): container finished" podID="289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" containerID="16ed8ecb4bfac314d95c0a3f759628b8c9eeb70d097eecd6e41a0b6de47e3d42" exitCode=0 Dec 08 00:11:03 crc kubenswrapper[4745]: I1208 00:11:03.480460 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-twwmd" event={"ID":"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0","Type":"ContainerDied","Data":"16ed8ecb4bfac314d95c0a3f759628b8c9eeb70d097eecd6e41a0b6de47e3d42"} Dec 08 00:11:03 crc kubenswrapper[4745]: I1208 00:11:03.483142 4745 generic.go:334] "Generic (PLEG): container finished" podID="e9bdca4d-dada-48b2-b9aa-43dd3801eb93" containerID="cfcede1292d44c175a8ff9d5196ea47cb2e05f3c8318b6c3fe19d2b29a198a9a" exitCode=0 Dec 08 00:11:03 crc kubenswrapper[4745]: I1208 00:11:03.483171 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6kk8" event={"ID":"e9bdca4d-dada-48b2-b9aa-43dd3801eb93","Type":"ContainerDied","Data":"cfcede1292d44c175a8ff9d5196ea47cb2e05f3c8318b6c3fe19d2b29a198a9a"} Dec 08 00:11:04 crc kubenswrapper[4745]: I1208 00:11:04.492389 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6kk8" 
event={"ID":"e9bdca4d-dada-48b2-b9aa-43dd3801eb93","Type":"ContainerStarted","Data":"becf97841eb9b27d95aeb007c43ce5f9e462cca7f408bb68eecd5ad0f740ab25"} Dec 08 00:11:04 crc kubenswrapper[4745]: I1208 00:11:04.494563 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gdrzd" event={"ID":"be800637-8860-4249-b97f-94c5c87aa8ed","Type":"ContainerStarted","Data":"da7c534a453ee6fc13a8496e7119f4fb245ac7c011d6b689a0ab2eee175d2090"} Dec 08 00:11:04 crc kubenswrapper[4745]: I1208 00:11:04.509324 4745 generic.go:334] "Generic (PLEG): container finished" podID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" containerID="fa634ace9d401fdae82d6c71fe9ec63135302d1a83b4787b634d4b2366defebd" exitCode=0 Dec 08 00:11:04 crc kubenswrapper[4745]: I1208 00:11:04.509512 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k66hs" event={"ID":"dd084f85-b44a-4016-9fbd-5f051c4e9a53","Type":"ContainerDied","Data":"fa634ace9d401fdae82d6c71fe9ec63135302d1a83b4787b634d4b2366defebd"} Dec 08 00:11:04 crc kubenswrapper[4745]: I1208 00:11:04.513824 4745 generic.go:334] "Generic (PLEG): container finished" podID="b5657276-6ab2-4a60-aa98-b08b3828c1b8" containerID="f3a5a92ab6bdef98da7f7d7366d1b412fcbdfa9d393b26c60702492c7da3caf9" exitCode=0 Dec 08 00:11:04 crc kubenswrapper[4745]: I1208 00:11:04.513914 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4r7rr" event={"ID":"b5657276-6ab2-4a60-aa98-b08b3828c1b8","Type":"ContainerDied","Data":"f3a5a92ab6bdef98da7f7d7366d1b412fcbdfa9d393b26c60702492c7da3caf9"} Dec 08 00:11:04 crc kubenswrapper[4745]: I1208 00:11:04.517808 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-twwmd" event={"ID":"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0","Type":"ContainerStarted","Data":"81e157b01d1359df8bb200f2530fd9c6bc84f75c466f2a85cc130b9a982f2a60"} Dec 08 00:11:04 crc kubenswrapper[4745]: I1208 00:11:04.532592 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-l6kk8" podStartSLOduration=3.694562008 podStartE2EDuration="1m3.532573923s" podCreationTimestamp="2025-12-08 00:10:01 +0000 UTC" firstStartedPulling="2025-12-08 00:10:04.04172867 +0000 UTC m=+159.470934970" lastFinishedPulling="2025-12-08 00:11:03.879740585 +0000 UTC m=+219.308946885" observedRunningTime="2025-12-08 00:11:04.530519384 +0000 UTC m=+219.959725694" watchObservedRunningTime="2025-12-08 00:11:04.532573923 +0000 UTC m=+219.961780223" Dec 08 00:11:04 crc kubenswrapper[4745]: I1208 00:11:04.547111 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-twwmd" podStartSLOduration=2.595580719 podStartE2EDuration="1m4.547097545s" podCreationTimestamp="2025-12-08 00:10:00 +0000 UTC" firstStartedPulling="2025-12-08 00:10:01.987847748 +0000 UTC m=+157.417054068" lastFinishedPulling="2025-12-08 00:11:03.939364594 +0000 UTC m=+219.368570894" observedRunningTime="2025-12-08 00:11:04.542853674 +0000 UTC m=+219.972059974" watchObservedRunningTime="2025-12-08 00:11:04.547097545 +0000 UTC m=+219.976303845" Dec 08 00:11:04 crc kubenswrapper[4745]: I1208 00:11:04.579941 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gdrzd" podStartSLOduration=3.081925993 podStartE2EDuration="1m6.579908684s" podCreationTimestamp="2025-12-08 00:09:58 +0000 UTC" 
firstStartedPulling="2025-12-08 00:10:00.389735179 +0000 UTC m=+155.818941479" lastFinishedPulling="2025-12-08 00:11:03.88771787 +0000 UTC m=+219.316924170" observedRunningTime="2025-12-08 00:11:04.57951402 +0000 UTC m=+220.008720320" watchObservedRunningTime="2025-12-08 00:11:04.579908684 +0000 UTC m=+220.009114984" Dec 08 00:11:05 crc kubenswrapper[4745]: I1208 00:11:05.524286 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4r7rr" event={"ID":"b5657276-6ab2-4a60-aa98-b08b3828c1b8","Type":"ContainerStarted","Data":"5d908131e7ab1e78d3a7070c01e837525f65954ff6d784fe2f22d979ed445f78"} Dec 08 00:11:05 crc kubenswrapper[4745]: I1208 00:11:05.526203 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k66hs" event={"ID":"dd084f85-b44a-4016-9fbd-5f051c4e9a53","Type":"ContainerStarted","Data":"6f9f9c26f63630472ec5cecfaba5904811af5155b4124d6ebb48c86b2e93fc0a"} Dec 08 00:11:05 crc kubenswrapper[4745]: I1208 00:11:05.546176 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4r7rr" podStartSLOduration=2.913129477 podStartE2EDuration="1m7.546156734s" podCreationTimestamp="2025-12-08 00:09:58 +0000 UTC" firstStartedPulling="2025-12-08 00:10:00.398856958 +0000 UTC m=+155.828063248" lastFinishedPulling="2025-12-08 00:11:05.031884205 +0000 UTC m=+220.461090505" observedRunningTime="2025-12-08 00:11:05.542634277 +0000 UTC m=+220.971840597" watchObservedRunningTime="2025-12-08 00:11:05.546156734 +0000 UTC m=+220.975363034" Dec 08 00:11:06 crc kubenswrapper[4745]: I1208 00:11:06.532897 4745 generic.go:334] "Generic (PLEG): container finished" podID="40aebf93-f654-463e-b4cd-c5f13850fee6" containerID="9738db66c1b2097a2aa2ed2145c35f8cef4083ce43d4fb1788f34a6e7ed1031b" exitCode=0 Dec 08 00:11:06 crc kubenswrapper[4745]: I1208 00:11:06.532964 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqrlr" event={"ID":"40aebf93-f654-463e-b4cd-c5f13850fee6","Type":"ContainerDied","Data":"9738db66c1b2097a2aa2ed2145c35f8cef4083ce43d4fb1788f34a6e7ed1031b"} Dec 08 00:11:06 crc kubenswrapper[4745]: I1208 00:11:06.536015 4745 generic.go:334] "Generic (PLEG): container finished" podID="76ddbfcb-3447-4c68-a36e-fc310ab2f75b" containerID="c8280e042ea76030f566c02fa88ef58d113f8ae3975ede86e7cc9cc0a9b4cc08" exitCode=0 Dec 08 00:11:06 crc kubenswrapper[4745]: I1208 00:11:06.536045 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fkwkz" event={"ID":"76ddbfcb-3447-4c68-a36e-fc310ab2f75b","Type":"ContainerDied","Data":"c8280e042ea76030f566c02fa88ef58d113f8ae3975ede86e7cc9cc0a9b4cc08"} Dec 08 00:11:06 crc kubenswrapper[4745]: I1208 00:11:06.550797 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-k66hs" podStartSLOduration=3.978450809 podStartE2EDuration="1m8.550778957s" podCreationTimestamp="2025-12-08 00:09:58 +0000 UTC" firstStartedPulling="2025-12-08 00:10:00.389540993 +0000 UTC m=+155.818747293" lastFinishedPulling="2025-12-08 00:11:04.961869121 +0000 UTC m=+220.391075441" observedRunningTime="2025-12-08 00:11:05.563281952 +0000 UTC m=+220.992488252" watchObservedRunningTime="2025-12-08 00:11:06.550778957 +0000 UTC m=+221.979985257" Dec 08 00:11:07 crc kubenswrapper[4745]: I1208 00:11:07.542696 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-fkwkz" event={"ID":"76ddbfcb-3447-4c68-a36e-fc310ab2f75b","Type":"ContainerStarted","Data":"35e94280b5fd53bf2fd868802813441e346ba6d8eb4f85c4f7c6fbc1aa3fa1a0"} Dec 08 00:11:07 crc kubenswrapper[4745]: I1208 00:11:07.544534 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqrlr" event={"ID":"40aebf93-f654-463e-b4cd-c5f13850fee6","Type":"ContainerStarted","Data":"d0fabebd12b3de5ff444b1c41ade4e61dd7cccacfea4133ac331977fab02028d"} Dec 08 00:11:07 crc kubenswrapper[4745]: I1208 00:11:07.563929 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fkwkz" podStartSLOduration=3.022950883 podStartE2EDuration="1m9.563914623s" podCreationTimestamp="2025-12-08 00:09:58 +0000 UTC" firstStartedPulling="2025-12-08 00:10:00.394717312 +0000 UTC m=+155.823923612" lastFinishedPulling="2025-12-08 00:11:06.935681052 +0000 UTC m=+222.364887352" observedRunningTime="2025-12-08 00:11:07.561811843 +0000 UTC m=+222.991018143" watchObservedRunningTime="2025-12-08 00:11:07.563914623 +0000 UTC m=+222.993120923" Dec 08 00:11:07 crc kubenswrapper[4745]: I1208 00:11:07.580128 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pqrlr" podStartSLOduration=4.673851747 podStartE2EDuration="1m7.580113461s" podCreationTimestamp="2025-12-08 00:10:00 +0000 UTC" firstStartedPulling="2025-12-08 00:10:04.039103489 +0000 UTC m=+159.468309789" lastFinishedPulling="2025-12-08 00:11:06.945365203 +0000 UTC m=+222.374571503" observedRunningTime="2025-12-08 00:11:07.576890054 +0000 UTC m=+223.006096354" watchObservedRunningTime="2025-12-08 00:11:07.580113461 +0000 UTC m=+223.009319761" Dec 08 00:11:08 crc kubenswrapper[4745]: I1208 00:11:08.443903 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:11:08 crc kubenswrapper[4745]: I1208 00:11:08.443964 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:11:08 crc kubenswrapper[4745]: I1208 00:11:08.521768 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:11:08 crc kubenswrapper[4745]: I1208 00:11:08.855520 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:11:08 crc kubenswrapper[4745]: I1208 00:11:08.855576 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:11:08 crc kubenswrapper[4745]: I1208 00:11:08.895980 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:11:08 crc kubenswrapper[4745]: I1208 00:11:08.928676 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:11:08 crc kubenswrapper[4745]: I1208 00:11:08.928738 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:11:09 crc kubenswrapper[4745]: I1208 00:11:09.080661 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:11:09 crc kubenswrapper[4745]: I1208 
00:11:09.080727 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:11:09 crc kubenswrapper[4745]: I1208 00:11:09.123209 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:11:09 crc kubenswrapper[4745]: I1208 00:11:09.591502 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:11:09 crc kubenswrapper[4745]: I1208 00:11:09.963305 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-fkwkz" podUID="76ddbfcb-3447-4c68-a36e-fc310ab2f75b" containerName="registry-server" probeResult="failure" output=< Dec 08 00:11:09 crc kubenswrapper[4745]: timeout: failed to connect service ":50051" within 1s Dec 08 00:11:09 crc kubenswrapper[4745]: > Dec 08 00:11:10 crc kubenswrapper[4745]: I1208 00:11:10.428225 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:11:10 crc kubenswrapper[4745]: I1208 00:11:10.428301 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:11:10 crc kubenswrapper[4745]: I1208 00:11:10.483369 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:11:10 crc kubenswrapper[4745]: I1208 00:11:10.604612 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:11:10 crc kubenswrapper[4745]: I1208 00:11:10.835673 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:11:10 crc kubenswrapper[4745]: I1208 00:11:10.835732 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:11:10 crc kubenswrapper[4745]: I1208 00:11:10.893107 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:11:11 crc kubenswrapper[4745]: I1208 00:11:11.880049 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:11:11 crc kubenswrapper[4745]: I1208 00:11:11.880446 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:11:11 crc kubenswrapper[4745]: I1208 00:11:11.943670 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:11:12 crc kubenswrapper[4745]: I1208 00:11:12.125329 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gdrzd"] Dec 08 00:11:12 crc kubenswrapper[4745]: I1208 00:11:12.125652 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gdrzd" podUID="be800637-8860-4249-b97f-94c5c87aa8ed" containerName="registry-server" containerID="cri-o://da7c534a453ee6fc13a8496e7119f4fb245ac7c011d6b689a0ab2eee175d2090" gracePeriod=2 Dec 08 00:11:12 crc kubenswrapper[4745]: I1208 00:11:12.193865 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:11:12 crc kubenswrapper[4745]: I1208 00:11:12.193997 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:11:12 crc kubenswrapper[4745]: I1208 00:11:12.245996 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:11:12 crc kubenswrapper[4745]: I1208 00:11:12.619760 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:11:12 crc kubenswrapper[4745]: I1208 00:11:12.629669 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:11:13 crc kubenswrapper[4745]: I1208 00:11:13.584899 4745 generic.go:334] "Generic (PLEG): container finished" podID="be800637-8860-4249-b97f-94c5c87aa8ed" containerID="da7c534a453ee6fc13a8496e7119f4fb245ac7c011d6b689a0ab2eee175d2090" exitCode=0 Dec 08 00:11:13 crc kubenswrapper[4745]: I1208 00:11:13.584985 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gdrzd" event={"ID":"be800637-8860-4249-b97f-94c5c87aa8ed","Type":"ContainerDied","Data":"da7c534a453ee6fc13a8496e7119f4fb245ac7c011d6b689a0ab2eee175d2090"} Dec 08 00:11:13 crc kubenswrapper[4745]: I1208 00:11:13.981768 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.125290 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be800637-8860-4249-b97f-94c5c87aa8ed-utilities\") pod \"be800637-8860-4249-b97f-94c5c87aa8ed\" (UID: \"be800637-8860-4249-b97f-94c5c87aa8ed\") " Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.125732 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be800637-8860-4249-b97f-94c5c87aa8ed-catalog-content\") pod \"be800637-8860-4249-b97f-94c5c87aa8ed\" (UID: \"be800637-8860-4249-b97f-94c5c87aa8ed\") " Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.125817 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9926l\" (UniqueName: \"kubernetes.io/projected/be800637-8860-4249-b97f-94c5c87aa8ed-kube-api-access-9926l\") pod \"be800637-8860-4249-b97f-94c5c87aa8ed\" (UID: \"be800637-8860-4249-b97f-94c5c87aa8ed\") " Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.127365 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be800637-8860-4249-b97f-94c5c87aa8ed-utilities" (OuterVolumeSpecName: "utilities") pod "be800637-8860-4249-b97f-94c5c87aa8ed" (UID: "be800637-8860-4249-b97f-94c5c87aa8ed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.132397 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be800637-8860-4249-b97f-94c5c87aa8ed-kube-api-access-9926l" (OuterVolumeSpecName: "kube-api-access-9926l") pod "be800637-8860-4249-b97f-94c5c87aa8ed" (UID: "be800637-8860-4249-b97f-94c5c87aa8ed"). InnerVolumeSpecName "kube-api-access-9926l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.227364 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be800637-8860-4249-b97f-94c5c87aa8ed-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.227406 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9926l\" (UniqueName: \"kubernetes.io/projected/be800637-8860-4249-b97f-94c5c87aa8ed-kube-api-access-9926l\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.269030 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be800637-8860-4249-b97f-94c5c87aa8ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "be800637-8860-4249-b97f-94c5c87aa8ed" (UID: "be800637-8860-4249-b97f-94c5c87aa8ed"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.328416 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be800637-8860-4249-b97f-94c5c87aa8ed-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.522420 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pc9ns"] Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.595539 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pc9ns" podUID="463b12ee-0c67-452b-8a10-7330ec0224aa" containerName="registry-server" containerID="cri-o://0b74821ac02a44e6e6793a43552c6734a1bad9e406e2aa19542dcbc59804a1a5" gracePeriod=2 Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.595700 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gdrzd" Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.597845 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gdrzd" event={"ID":"be800637-8860-4249-b97f-94c5c87aa8ed","Type":"ContainerDied","Data":"71696ac0429054eb7ba4b9c0b8b119018e4a9ea4c65b4d3f5d8cd4621d48c4d6"} Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.597916 4745 scope.go:117] "RemoveContainer" containerID="da7c534a453ee6fc13a8496e7119f4fb245ac7c011d6b689a0ab2eee175d2090" Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.616613 4745 scope.go:117] "RemoveContainer" containerID="b5d035e98b91919199916759d2874998312bcd5a1d5bc8852de92d9b7ccbc66f" Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.626992 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gdrzd"] Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.627417 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gdrzd"] Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.643471 4745 scope.go:117] "RemoveContainer" containerID="06164353cf425782db830a07bea431f8f43793f954bd30c4754fd2ec2f1bdd22" Dec 08 00:11:14 crc kubenswrapper[4745]: I1208 00:11:14.892007 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be800637-8860-4249-b97f-94c5c87aa8ed" path="/var/lib/kubelet/pods/be800637-8860-4249-b97f-94c5c87aa8ed/volumes" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.139155 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pc9ns_463b12ee-0c67-452b-8a10-7330ec0224aa/registry-server/0.log" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.141198 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.187798 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qc892\" (UniqueName: \"kubernetes.io/projected/463b12ee-0c67-452b-8a10-7330ec0224aa-kube-api-access-qc892\") pod \"463b12ee-0c67-452b-8a10-7330ec0224aa\" (UID: \"463b12ee-0c67-452b-8a10-7330ec0224aa\") " Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.187913 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/463b12ee-0c67-452b-8a10-7330ec0224aa-catalog-content\") pod \"463b12ee-0c67-452b-8a10-7330ec0224aa\" (UID: \"463b12ee-0c67-452b-8a10-7330ec0224aa\") " Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.188009 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/463b12ee-0c67-452b-8a10-7330ec0224aa-utilities\") pod \"463b12ee-0c67-452b-8a10-7330ec0224aa\" (UID: \"463b12ee-0c67-452b-8a10-7330ec0224aa\") " Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.189070 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/463b12ee-0c67-452b-8a10-7330ec0224aa-utilities" (OuterVolumeSpecName: "utilities") pod "463b12ee-0c67-452b-8a10-7330ec0224aa" (UID: "463b12ee-0c67-452b-8a10-7330ec0224aa"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.212148 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/463b12ee-0c67-452b-8a10-7330ec0224aa-kube-api-access-qc892" (OuterVolumeSpecName: "kube-api-access-qc892") pod "463b12ee-0c67-452b-8a10-7330ec0224aa" (UID: "463b12ee-0c67-452b-8a10-7330ec0224aa"). InnerVolumeSpecName "kube-api-access-qc892". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.289773 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/463b12ee-0c67-452b-8a10-7330ec0224aa-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.289804 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qc892\" (UniqueName: \"kubernetes.io/projected/463b12ee-0c67-452b-8a10-7330ec0224aa-kube-api-access-qc892\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.319451 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/463b12ee-0c67-452b-8a10-7330ec0224aa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "463b12ee-0c67-452b-8a10-7330ec0224aa" (UID: "463b12ee-0c67-452b-8a10-7330ec0224aa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.390482 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/463b12ee-0c67-452b-8a10-7330ec0224aa-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.621131 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pc9ns_463b12ee-0c67-452b-8a10-7330ec0224aa/registry-server/0.log" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.621921 4745 generic.go:334] "Generic (PLEG): container finished" podID="463b12ee-0c67-452b-8a10-7330ec0224aa" containerID="0b74821ac02a44e6e6793a43552c6734a1bad9e406e2aa19542dcbc59804a1a5" exitCode=137 Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.621990 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pc9ns" event={"ID":"463b12ee-0c67-452b-8a10-7330ec0224aa","Type":"ContainerDied","Data":"0b74821ac02a44e6e6793a43552c6734a1bad9e406e2aa19542dcbc59804a1a5"} Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.622021 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pc9ns" event={"ID":"463b12ee-0c67-452b-8a10-7330ec0224aa","Type":"ContainerDied","Data":"d987150e0cda186d8ab4ad3b101a501f755335ae8a8905cdba9c5a088d94ac64"} Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.622056 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pc9ns" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.622058 4745 scope.go:117] "RemoveContainer" containerID="0b74821ac02a44e6e6793a43552c6734a1bad9e406e2aa19542dcbc59804a1a5" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.644281 4745 scope.go:117] "RemoveContainer" containerID="b27bf1abde19c14426388e19151d58892a3cfac6dbecbde2ad62ab44fdc749b5" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.655727 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pc9ns"] Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.660684 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pc9ns"] Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.670193 4745 scope.go:117] "RemoveContainer" containerID="8cebaf1ac2e91a3f6654151e0e90732331658c0abc6c27e5c0650d55e1616c66" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.687027 4745 scope.go:117] "RemoveContainer" containerID="0b74821ac02a44e6e6793a43552c6734a1bad9e406e2aa19542dcbc59804a1a5" Dec 08 00:11:18 crc kubenswrapper[4745]: E1208 00:11:18.687571 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b74821ac02a44e6e6793a43552c6734a1bad9e406e2aa19542dcbc59804a1a5\": container with ID starting with 0b74821ac02a44e6e6793a43552c6734a1bad9e406e2aa19542dcbc59804a1a5 not found: ID does not exist" containerID="0b74821ac02a44e6e6793a43552c6734a1bad9e406e2aa19542dcbc59804a1a5" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.687615 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b74821ac02a44e6e6793a43552c6734a1bad9e406e2aa19542dcbc59804a1a5"} err="failed to get container status \"0b74821ac02a44e6e6793a43552c6734a1bad9e406e2aa19542dcbc59804a1a5\": rpc error: code = NotFound desc = could not find container \"0b74821ac02a44e6e6793a43552c6734a1bad9e406e2aa19542dcbc59804a1a5\": container with ID starting with 0b74821ac02a44e6e6793a43552c6734a1bad9e406e2aa19542dcbc59804a1a5 not found: ID does not exist" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.687646 4745 scope.go:117] "RemoveContainer" containerID="b27bf1abde19c14426388e19151d58892a3cfac6dbecbde2ad62ab44fdc749b5" Dec 08 00:11:18 crc kubenswrapper[4745]: E1208 00:11:18.688039 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b27bf1abde19c14426388e19151d58892a3cfac6dbecbde2ad62ab44fdc749b5\": container with ID starting with b27bf1abde19c14426388e19151d58892a3cfac6dbecbde2ad62ab44fdc749b5 not found: ID does not exist" containerID="b27bf1abde19c14426388e19151d58892a3cfac6dbecbde2ad62ab44fdc749b5" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.688159 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b27bf1abde19c14426388e19151d58892a3cfac6dbecbde2ad62ab44fdc749b5"} err="failed to get container status \"b27bf1abde19c14426388e19151d58892a3cfac6dbecbde2ad62ab44fdc749b5\": rpc error: code = NotFound desc = could not find container \"b27bf1abde19c14426388e19151d58892a3cfac6dbecbde2ad62ab44fdc749b5\": container with ID starting with b27bf1abde19c14426388e19151d58892a3cfac6dbecbde2ad62ab44fdc749b5 not found: ID does not exist" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.688267 4745 scope.go:117] "RemoveContainer" 
containerID="8cebaf1ac2e91a3f6654151e0e90732331658c0abc6c27e5c0650d55e1616c66" Dec 08 00:11:18 crc kubenswrapper[4745]: E1208 00:11:18.688759 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cebaf1ac2e91a3f6654151e0e90732331658c0abc6c27e5c0650d55e1616c66\": container with ID starting with 8cebaf1ac2e91a3f6654151e0e90732331658c0abc6c27e5c0650d55e1616c66 not found: ID does not exist" containerID="8cebaf1ac2e91a3f6654151e0e90732331658c0abc6c27e5c0650d55e1616c66" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.688861 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cebaf1ac2e91a3f6654151e0e90732331658c0abc6c27e5c0650d55e1616c66"} err="failed to get container status \"8cebaf1ac2e91a3f6654151e0e90732331658c0abc6c27e5c0650d55e1616c66\": rpc error: code = NotFound desc = could not find container \"8cebaf1ac2e91a3f6654151e0e90732331658c0abc6c27e5c0650d55e1616c66\": container with ID starting with 8cebaf1ac2e91a3f6654151e0e90732331658c0abc6c27e5c0650d55e1616c66 not found: ID does not exist" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.889444 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="463b12ee-0c67-452b-8a10-7330ec0224aa" path="/var/lib/kubelet/pods/463b12ee-0c67-452b-8a10-7330ec0224aa/volumes" Dec 08 00:11:18 crc kubenswrapper[4745]: I1208 00:11:18.968289 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:11:19 crc kubenswrapper[4745]: I1208 00:11:19.026982 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:11:19 crc kubenswrapper[4745]: I1208 00:11:19.118001 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:11:19 crc kubenswrapper[4745]: I1208 00:11:19.486508 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/certified-operators-k66hs" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" containerName="registry-server" probeResult="failure" output=< Dec 08 00:11:19 crc kubenswrapper[4745]: timeout: failed to connect service ":50051" within 1s Dec 08 00:11:19 crc kubenswrapper[4745]: > Dec 08 00:11:20 crc kubenswrapper[4745]: I1208 00:11:20.741998 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-b9kth"] Dec 08 00:11:20 crc kubenswrapper[4745]: I1208 00:11:20.891573 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:11:22 crc kubenswrapper[4745]: I1208 00:11:22.920830 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4r7rr"] Dec 08 00:11:22 crc kubenswrapper[4745]: I1208 00:11:22.921164 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4r7rr" podUID="b5657276-6ab2-4a60-aa98-b08b3828c1b8" containerName="registry-server" containerID="cri-o://5d908131e7ab1e78d3a7070c01e837525f65954ff6d784fe2f22d979ed445f78" gracePeriod=2 Dec 08 00:11:23 crc kubenswrapper[4745]: I1208 00:11:23.649946 4745 generic.go:334] "Generic (PLEG): container finished" podID="b5657276-6ab2-4a60-aa98-b08b3828c1b8" containerID="5d908131e7ab1e78d3a7070c01e837525f65954ff6d784fe2f22d979ed445f78" exitCode=0 
Dec 08 00:11:23 crc kubenswrapper[4745]: I1208 00:11:23.649990 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4r7rr" event={"ID":"b5657276-6ab2-4a60-aa98-b08b3828c1b8","Type":"ContainerDied","Data":"5d908131e7ab1e78d3a7070c01e837525f65954ff6d784fe2f22d979ed445f78"} Dec 08 00:11:23 crc kubenswrapper[4745]: I1208 00:11:23.854159 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:11:23 crc kubenswrapper[4745]: I1208 00:11:23.969710 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5657276-6ab2-4a60-aa98-b08b3828c1b8-catalog-content\") pod \"b5657276-6ab2-4a60-aa98-b08b3828c1b8\" (UID: \"b5657276-6ab2-4a60-aa98-b08b3828c1b8\") " Dec 08 00:11:23 crc kubenswrapper[4745]: I1208 00:11:23.969860 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4sl24\" (UniqueName: \"kubernetes.io/projected/b5657276-6ab2-4a60-aa98-b08b3828c1b8-kube-api-access-4sl24\") pod \"b5657276-6ab2-4a60-aa98-b08b3828c1b8\" (UID: \"b5657276-6ab2-4a60-aa98-b08b3828c1b8\") " Dec 08 00:11:23 crc kubenswrapper[4745]: I1208 00:11:23.969893 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5657276-6ab2-4a60-aa98-b08b3828c1b8-utilities\") pod \"b5657276-6ab2-4a60-aa98-b08b3828c1b8\" (UID: \"b5657276-6ab2-4a60-aa98-b08b3828c1b8\") " Dec 08 00:11:23 crc kubenswrapper[4745]: I1208 00:11:23.971039 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5657276-6ab2-4a60-aa98-b08b3828c1b8-utilities" (OuterVolumeSpecName: "utilities") pod "b5657276-6ab2-4a60-aa98-b08b3828c1b8" (UID: "b5657276-6ab2-4a60-aa98-b08b3828c1b8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:11:23 crc kubenswrapper[4745]: I1208 00:11:23.977121 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5657276-6ab2-4a60-aa98-b08b3828c1b8-kube-api-access-4sl24" (OuterVolumeSpecName: "kube-api-access-4sl24") pod "b5657276-6ab2-4a60-aa98-b08b3828c1b8" (UID: "b5657276-6ab2-4a60-aa98-b08b3828c1b8"). InnerVolumeSpecName "kube-api-access-4sl24". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:11:24 crc kubenswrapper[4745]: I1208 00:11:24.014856 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5657276-6ab2-4a60-aa98-b08b3828c1b8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b5657276-6ab2-4a60-aa98-b08b3828c1b8" (UID: "b5657276-6ab2-4a60-aa98-b08b3828c1b8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:11:24 crc kubenswrapper[4745]: I1208 00:11:24.071766 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5657276-6ab2-4a60-aa98-b08b3828c1b8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:24 crc kubenswrapper[4745]: I1208 00:11:24.071824 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4sl24\" (UniqueName: \"kubernetes.io/projected/b5657276-6ab2-4a60-aa98-b08b3828c1b8-kube-api-access-4sl24\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:24 crc kubenswrapper[4745]: I1208 00:11:24.071842 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5657276-6ab2-4a60-aa98-b08b3828c1b8-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:24 crc kubenswrapper[4745]: I1208 00:11:24.660462 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4r7rr" event={"ID":"b5657276-6ab2-4a60-aa98-b08b3828c1b8","Type":"ContainerDied","Data":"825bd23548f68c37a5467494c80290646d75e58e37d3ab29390eb5fc6b892287"} Dec 08 00:11:24 crc kubenswrapper[4745]: I1208 00:11:24.660553 4745 scope.go:117] "RemoveContainer" containerID="5d908131e7ab1e78d3a7070c01e837525f65954ff6d784fe2f22d979ed445f78" Dec 08 00:11:24 crc kubenswrapper[4745]: I1208 00:11:24.660620 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4r7rr" Dec 08 00:11:24 crc kubenswrapper[4745]: I1208 00:11:24.695286 4745 scope.go:117] "RemoveContainer" containerID="f3a5a92ab6bdef98da7f7d7366d1b412fcbdfa9d393b26c60702492c7da3caf9" Dec 08 00:11:24 crc kubenswrapper[4745]: I1208 00:11:24.699256 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4r7rr"] Dec 08 00:11:24 crc kubenswrapper[4745]: I1208 00:11:24.706055 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4r7rr"] Dec 08 00:11:24 crc kubenswrapper[4745]: I1208 00:11:24.719721 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqrlr"] Dec 08 00:11:24 crc kubenswrapper[4745]: I1208 00:11:24.720017 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pqrlr" podUID="40aebf93-f654-463e-b4cd-c5f13850fee6" containerName="registry-server" containerID="cri-o://d0fabebd12b3de5ff444b1c41ade4e61dd7cccacfea4133ac331977fab02028d" gracePeriod=2 Dec 08 00:11:24 crc kubenswrapper[4745]: I1208 00:11:24.724213 4745 scope.go:117] "RemoveContainer" containerID="6886b3bcf75c1f264f92922a4fade79e6d34b21f7425b7f90324229fe048c178" Dec 08 00:11:24 crc kubenswrapper[4745]: I1208 00:11:24.896325 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5657276-6ab2-4a60-aa98-b08b3828c1b8" path="/var/lib/kubelet/pods/b5657276-6ab2-4a60-aa98-b08b3828c1b8/volumes" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.137584 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.185140 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40aebf93-f654-463e-b4cd-c5f13850fee6-catalog-content\") pod \"40aebf93-f654-463e-b4cd-c5f13850fee6\" (UID: \"40aebf93-f654-463e-b4cd-c5f13850fee6\") " Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.185346 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40aebf93-f654-463e-b4cd-c5f13850fee6-utilities\") pod \"40aebf93-f654-463e-b4cd-c5f13850fee6\" (UID: \"40aebf93-f654-463e-b4cd-c5f13850fee6\") " Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.185394 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzhx5\" (UniqueName: \"kubernetes.io/projected/40aebf93-f654-463e-b4cd-c5f13850fee6-kube-api-access-tzhx5\") pod \"40aebf93-f654-463e-b4cd-c5f13850fee6\" (UID: \"40aebf93-f654-463e-b4cd-c5f13850fee6\") " Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.186793 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40aebf93-f654-463e-b4cd-c5f13850fee6-utilities" (OuterVolumeSpecName: "utilities") pod "40aebf93-f654-463e-b4cd-c5f13850fee6" (UID: "40aebf93-f654-463e-b4cd-c5f13850fee6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.192896 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40aebf93-f654-463e-b4cd-c5f13850fee6-kube-api-access-tzhx5" (OuterVolumeSpecName: "kube-api-access-tzhx5") pod "40aebf93-f654-463e-b4cd-c5f13850fee6" (UID: "40aebf93-f654-463e-b4cd-c5f13850fee6"). InnerVolumeSpecName "kube-api-access-tzhx5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.202185 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40aebf93-f654-463e-b4cd-c5f13850fee6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "40aebf93-f654-463e-b4cd-c5f13850fee6" (UID: "40aebf93-f654-463e-b4cd-c5f13850fee6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.287080 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40aebf93-f654-463e-b4cd-c5f13850fee6-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.287134 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzhx5\" (UniqueName: \"kubernetes.io/projected/40aebf93-f654-463e-b4cd-c5f13850fee6-kube-api-access-tzhx5\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.287161 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40aebf93-f654-463e-b4cd-c5f13850fee6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.674097 4745 generic.go:334] "Generic (PLEG): container finished" podID="40aebf93-f654-463e-b4cd-c5f13850fee6" containerID="d0fabebd12b3de5ff444b1c41ade4e61dd7cccacfea4133ac331977fab02028d" exitCode=0 Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.674199 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqrlr" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.674194 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqrlr" event={"ID":"40aebf93-f654-463e-b4cd-c5f13850fee6","Type":"ContainerDied","Data":"d0fabebd12b3de5ff444b1c41ade4e61dd7cccacfea4133ac331977fab02028d"} Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.674413 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqrlr" event={"ID":"40aebf93-f654-463e-b4cd-c5f13850fee6","Type":"ContainerDied","Data":"236770113af3beaf9bdc4d440233df2458f5c6607584694139fab8e83b383285"} Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.674452 4745 scope.go:117] "RemoveContainer" containerID="d0fabebd12b3de5ff444b1c41ade4e61dd7cccacfea4133ac331977fab02028d" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.694412 4745 scope.go:117] "RemoveContainer" containerID="9738db66c1b2097a2aa2ed2145c35f8cef4083ce43d4fb1788f34a6e7ed1031b" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.714841 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqrlr"] Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.717873 4745 scope.go:117] "RemoveContainer" containerID="80e346490c1dc99e8ec5741f04016abb79531acc3797f9b9272fedb0ca09409f" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.721955 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqrlr"] Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.752967 4745 scope.go:117] "RemoveContainer" containerID="d0fabebd12b3de5ff444b1c41ade4e61dd7cccacfea4133ac331977fab02028d" Dec 08 00:11:25 crc kubenswrapper[4745]: E1208 00:11:25.754977 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0fabebd12b3de5ff444b1c41ade4e61dd7cccacfea4133ac331977fab02028d\": container with ID starting with d0fabebd12b3de5ff444b1c41ade4e61dd7cccacfea4133ac331977fab02028d not found: ID does not exist" containerID="d0fabebd12b3de5ff444b1c41ade4e61dd7cccacfea4133ac331977fab02028d" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.755013 4745 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0fabebd12b3de5ff444b1c41ade4e61dd7cccacfea4133ac331977fab02028d"} err="failed to get container status \"d0fabebd12b3de5ff444b1c41ade4e61dd7cccacfea4133ac331977fab02028d\": rpc error: code = NotFound desc = could not find container \"d0fabebd12b3de5ff444b1c41ade4e61dd7cccacfea4133ac331977fab02028d\": container with ID starting with d0fabebd12b3de5ff444b1c41ade4e61dd7cccacfea4133ac331977fab02028d not found: ID does not exist" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.755038 4745 scope.go:117] "RemoveContainer" containerID="9738db66c1b2097a2aa2ed2145c35f8cef4083ce43d4fb1788f34a6e7ed1031b" Dec 08 00:11:25 crc kubenswrapper[4745]: E1208 00:11:25.756365 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9738db66c1b2097a2aa2ed2145c35f8cef4083ce43d4fb1788f34a6e7ed1031b\": container with ID starting with 9738db66c1b2097a2aa2ed2145c35f8cef4083ce43d4fb1788f34a6e7ed1031b not found: ID does not exist" containerID="9738db66c1b2097a2aa2ed2145c35f8cef4083ce43d4fb1788f34a6e7ed1031b" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.756398 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9738db66c1b2097a2aa2ed2145c35f8cef4083ce43d4fb1788f34a6e7ed1031b"} err="failed to get container status \"9738db66c1b2097a2aa2ed2145c35f8cef4083ce43d4fb1788f34a6e7ed1031b\": rpc error: code = NotFound desc = could not find container \"9738db66c1b2097a2aa2ed2145c35f8cef4083ce43d4fb1788f34a6e7ed1031b\": container with ID starting with 9738db66c1b2097a2aa2ed2145c35f8cef4083ce43d4fb1788f34a6e7ed1031b not found: ID does not exist" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.756420 4745 scope.go:117] "RemoveContainer" containerID="80e346490c1dc99e8ec5741f04016abb79531acc3797f9b9272fedb0ca09409f" Dec 08 00:11:25 crc kubenswrapper[4745]: E1208 00:11:25.757077 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80e346490c1dc99e8ec5741f04016abb79531acc3797f9b9272fedb0ca09409f\": container with ID starting with 80e346490c1dc99e8ec5741f04016abb79531acc3797f9b9272fedb0ca09409f not found: ID does not exist" containerID="80e346490c1dc99e8ec5741f04016abb79531acc3797f9b9272fedb0ca09409f" Dec 08 00:11:25 crc kubenswrapper[4745]: I1208 00:11:25.757101 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80e346490c1dc99e8ec5741f04016abb79531acc3797f9b9272fedb0ca09409f"} err="failed to get container status \"80e346490c1dc99e8ec5741f04016abb79531acc3797f9b9272fedb0ca09409f\": rpc error: code = NotFound desc = could not find container \"80e346490c1dc99e8ec5741f04016abb79531acc3797f9b9272fedb0ca09409f\": container with ID starting with 80e346490c1dc99e8ec5741f04016abb79531acc3797f9b9272fedb0ca09409f not found: ID does not exist" Dec 08 00:11:26 crc kubenswrapper[4745]: I1208 00:11:26.903757 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40aebf93-f654-463e-b4cd-c5f13850fee6" path="/var/lib/kubelet/pods/40aebf93-f654-463e-b4cd-c5f13850fee6/volumes" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.101238 4745 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.101921 4745 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="be800637-8860-4249-b97f-94c5c87aa8ed" containerName="extract-content" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.101979 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="be800637-8860-4249-b97f-94c5c87aa8ed" containerName="extract-content" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.101995 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5657276-6ab2-4a60-aa98-b08b3828c1b8" containerName="registry-server" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102005 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5657276-6ab2-4a60-aa98-b08b3828c1b8" containerName="registry-server" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.102024 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5657276-6ab2-4a60-aa98-b08b3828c1b8" containerName="extract-content" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102035 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5657276-6ab2-4a60-aa98-b08b3828c1b8" containerName="extract-content" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.102050 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="463b12ee-0c67-452b-8a10-7330ec0224aa" containerName="registry-server" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102060 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="463b12ee-0c67-452b-8a10-7330ec0224aa" containerName="registry-server" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.102076 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40aebf93-f654-463e-b4cd-c5f13850fee6" containerName="extract-utilities" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102087 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="40aebf93-f654-463e-b4cd-c5f13850fee6" containerName="extract-utilities" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.102105 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="463b12ee-0c67-452b-8a10-7330ec0224aa" containerName="extract-content" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102114 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="463b12ee-0c67-452b-8a10-7330ec0224aa" containerName="extract-content" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.102126 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5657276-6ab2-4a60-aa98-b08b3828c1b8" containerName="extract-utilities" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102136 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5657276-6ab2-4a60-aa98-b08b3828c1b8" containerName="extract-utilities" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.102151 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40aebf93-f654-463e-b4cd-c5f13850fee6" containerName="registry-server" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102162 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="40aebf93-f654-463e-b4cd-c5f13850fee6" containerName="registry-server" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.102175 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c1f4efd-7af4-451c-bc57-150df9d0fc1d" containerName="pruner" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102185 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c1f4efd-7af4-451c-bc57-150df9d0fc1d" containerName="pruner" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.102200 4745 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="40aebf93-f654-463e-b4cd-c5f13850fee6" containerName="extract-content" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102210 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="40aebf93-f654-463e-b4cd-c5f13850fee6" containerName="extract-content" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.102225 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be800637-8860-4249-b97f-94c5c87aa8ed" containerName="registry-server" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102234 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="be800637-8860-4249-b97f-94c5c87aa8ed" containerName="registry-server" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.102245 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="712511e1-14ba-4465-8050-02b8d5916f46" containerName="image-pruner" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102254 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="712511e1-14ba-4465-8050-02b8d5916f46" containerName="image-pruner" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.102269 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="463b12ee-0c67-452b-8a10-7330ec0224aa" containerName="extract-utilities" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102280 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="463b12ee-0c67-452b-8a10-7330ec0224aa" containerName="extract-utilities" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.102300 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be800637-8860-4249-b97f-94c5c87aa8ed" containerName="extract-utilities" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102310 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="be800637-8860-4249-b97f-94c5c87aa8ed" containerName="extract-utilities" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102463 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="712511e1-14ba-4465-8050-02b8d5916f46" containerName="image-pruner" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102480 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="be800637-8860-4249-b97f-94c5c87aa8ed" containerName="registry-server" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102496 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="463b12ee-0c67-452b-8a10-7330ec0224aa" containerName="registry-server" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102508 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c1f4efd-7af4-451c-bc57-150df9d0fc1d" containerName="pruner" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102527 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5657276-6ab2-4a60-aa98-b08b3828c1b8" containerName="registry-server" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.102540 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="40aebf93-f654-463e-b4cd-c5f13850fee6" containerName="registry-server" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.103065 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.103649 4745 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.104076 4745 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.104204 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6" gracePeriod=15 Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.104676 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.104712 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.104743 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.104762 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.104782 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.104796 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.104810 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.104824 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.104847 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.104860 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.104884 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.104897 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.104911 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.104976 4745 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.105226 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.105245 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.105280 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.105301 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.105318 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.105506 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe" gracePeriod=15 Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.105522 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11" gracePeriod=15 Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.105553 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002" gracePeriod=15 Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.105615 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118" gracePeriod=15 Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.105786 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.126369 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.126405 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.126434 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.126451 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.127276 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.127415 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.127471 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.127503 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.228563 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.228622 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.228649 4745 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.228667 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.228731 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.228758 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.228794 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.228815 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.228818 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.228911 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.228919 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.228987 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.228993 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.229020 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.229045 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.229059 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.418322 4745 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.419084 4745 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.419649 4745 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.419904 4745 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.420530 4745 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.420637 4745 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.421243 4745 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="200ms" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.509383 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.510226 4745 status_manager.go:851] "Failed to get status for pod" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" pod="openshift-marketplace/certified-operators-k66hs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-k66hs\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.510766 4745 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:28 crc kubenswrapper[4745]: E1208 00:11:28.622465 4745 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="400ms" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.696851 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.699593 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.700980 4745 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe" exitCode=0 Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.701023 4745 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118" exitCode=0 Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.701037 4745 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11" exitCode=0 Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.701050 4745 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002" exitCode=2 Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.701171 4745 scope.go:117] "RemoveContainer" containerID="b732bcd2932fe6833b0055f93301dec3556080f248a56471aaf0f5b1e89bee12" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.703061 4745 generic.go:334] "Generic (PLEG): container finished" podID="5ae5f185-66f5-42c0-8a72-355923a94e40" containerID="5003433d2932953d97ccc443fa343a361d33cc147457474750c6a3504da20961" exitCode=0 Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.703110 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5ae5f185-66f5-42c0-8a72-355923a94e40","Type":"ContainerDied","Data":"5003433d2932953d97ccc443fa343a361d33cc147457474750c6a3504da20961"} Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.703825 4745 status_manager.go:851] "Failed to get status for pod" podUID="5ae5f185-66f5-42c0-8a72-355923a94e40" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.704193 4745 status_manager.go:851] "Failed to get status for pod" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" pod="openshift-marketplace/certified-operators-k66hs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-k66hs\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:28 crc kubenswrapper[4745]: I1208 00:11:28.704520 4745 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:29 crc kubenswrapper[4745]: E1208 00:11:29.023958 4745 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="800ms" Dec 08 00:11:29 crc kubenswrapper[4745]: I1208 00:11:29.713069 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 08 00:11:29 crc kubenswrapper[4745]: E1208 00:11:29.826088 4745 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="1.6s" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.008706 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.009326 4745 status_manager.go:851] "Failed to get status for pod" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" pod="openshift-marketplace/certified-operators-k66hs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-k66hs\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.009605 4745 status_manager.go:851] "Failed to get status for pod" podUID="5ae5f185-66f5-42c0-8a72-355923a94e40" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.053964 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5ae5f185-66f5-42c0-8a72-355923a94e40-kubelet-dir\") pod \"5ae5f185-66f5-42c0-8a72-355923a94e40\" (UID: \"5ae5f185-66f5-42c0-8a72-355923a94e40\") " Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.054074 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5ae5f185-66f5-42c0-8a72-355923a94e40-var-lock\") pod \"5ae5f185-66f5-42c0-8a72-355923a94e40\" (UID: \"5ae5f185-66f5-42c0-8a72-355923a94e40\") " Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.054117 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5ae5f185-66f5-42c0-8a72-355923a94e40-kube-api-access\") pod \"5ae5f185-66f5-42c0-8a72-355923a94e40\" (UID: \"5ae5f185-66f5-42c0-8a72-355923a94e40\") " Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.054128 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ae5f185-66f5-42c0-8a72-355923a94e40-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "5ae5f185-66f5-42c0-8a72-355923a94e40" (UID: "5ae5f185-66f5-42c0-8a72-355923a94e40"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.054566 4745 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5ae5f185-66f5-42c0-8a72-355923a94e40-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.054805 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ae5f185-66f5-42c0-8a72-355923a94e40-var-lock" (OuterVolumeSpecName: "var-lock") pod "5ae5f185-66f5-42c0-8a72-355923a94e40" (UID: "5ae5f185-66f5-42c0-8a72-355923a94e40"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.060218 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ae5f185-66f5-42c0-8a72-355923a94e40-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "5ae5f185-66f5-42c0-8a72-355923a94e40" (UID: "5ae5f185-66f5-42c0-8a72-355923a94e40"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.155272 4745 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5ae5f185-66f5-42c0-8a72-355923a94e40-var-lock\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.155317 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5ae5f185-66f5-42c0-8a72-355923a94e40-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.505720 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.506984 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.507702 4745 status_manager.go:851] "Failed to get status for pod" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" pod="openshift-marketplace/certified-operators-k66hs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-k66hs\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.508143 4745 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.508433 4745 status_manager.go:851] "Failed to get status for pod" podUID="5ae5f185-66f5-42c0-8a72-355923a94e40" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.560210 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.560352 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.560379 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.560445 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.560450 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.560602 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.560970 4745 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.560998 4745 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.561016 4745 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.729689 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.730886 4745 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6" exitCode=0 Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.731047 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.731067 4745 scope.go:117] "RemoveContainer" containerID="569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.733880 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5ae5f185-66f5-42c0-8a72-355923a94e40","Type":"ContainerDied","Data":"33b68909e488c81b8165738ea2f769e2f0b81726018bf1a763bb70527353dfbf"} Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.733936 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33b68909e488c81b8165738ea2f769e2f0b81726018bf1a763bb70527353dfbf" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.734045 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.763541 4745 status_manager.go:851] "Failed to get status for pod" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" pod="openshift-marketplace/certified-operators-k66hs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-k66hs\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.764031 4745 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.764336 4745 status_manager.go:851] "Failed to get status for pod" podUID="5ae5f185-66f5-42c0-8a72-355923a94e40" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.765003 4745 status_manager.go:851] "Failed to get status for pod" podUID="5ae5f185-66f5-42c0-8a72-355923a94e40" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.765140 4745 scope.go:117] "RemoveContainer" containerID="67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.765866 4745 status_manager.go:851] "Failed to get status for pod" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" pod="openshift-marketplace/certified-operators-k66hs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-k66hs\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.766574 4745 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.785440 4745 scope.go:117] "RemoveContainer" containerID="8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.809745 4745 scope.go:117] "RemoveContainer" containerID="d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.830141 4745 scope.go:117] "RemoveContainer" containerID="be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.862316 4745 scope.go:117] "RemoveContainer" containerID="f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.889160 4745 scope.go:117] "RemoveContainer" containerID="569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe" Dec 08 00:11:30 crc kubenswrapper[4745]: E1208 00:11:30.889657 4745 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\": container with ID starting with 569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe not found: ID does not exist" containerID="569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.889715 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe"} err="failed to get container status \"569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\": rpc error: code = NotFound desc = could not find container \"569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe\": container with ID starting with 569c765feabd3f94950e3cfd100e577f532d84d21e4aa240dec94f2422b3bebe not found: ID does not exist" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.889742 4745 scope.go:117] "RemoveContainer" containerID="67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.891210 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 08 00:11:30 crc kubenswrapper[4745]: E1208 00:11:30.892365 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\": container with ID starting with 67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118 not found: ID does not exist" containerID="67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.892419 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118"} err="failed to get container status \"67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\": rpc error: code = NotFound desc = could not find container \"67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118\": container with ID starting with 67d3a9245f36aa361476ba06ac5bb1b307f355dbed38c1d5769c98b523e36118 not found: ID does not exist" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.892439 4745 scope.go:117] "RemoveContainer" containerID="8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11" Dec 08 00:11:30 crc kubenswrapper[4745]: E1208 00:11:30.892866 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\": container with ID starting with 8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11 not found: ID does not exist" containerID="8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.892922 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11"} err="failed to get container status \"8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\": rpc error: code = NotFound desc = could not find container 
\"8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11\": container with ID starting with 8ed9079b5134b69fec62f043560876b7f46cf88093a02ca6efb43bf9fc7fbb11 not found: ID does not exist" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.892979 4745 scope.go:117] "RemoveContainer" containerID="d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002" Dec 08 00:11:30 crc kubenswrapper[4745]: E1208 00:11:30.894611 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\": container with ID starting with d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002 not found: ID does not exist" containerID="d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.894681 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002"} err="failed to get container status \"d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\": rpc error: code = NotFound desc = could not find container \"d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002\": container with ID starting with d11fefcb2ffc871fb8bc5eabeae800b7220c40657b15dc850d11aa8d3a08f002 not found: ID does not exist" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.894725 4745 scope.go:117] "RemoveContainer" containerID="be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6" Dec 08 00:11:30 crc kubenswrapper[4745]: E1208 00:11:30.895771 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\": container with ID starting with be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6 not found: ID does not exist" containerID="be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.895918 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6"} err="failed to get container status \"be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\": rpc error: code = NotFound desc = could not find container \"be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6\": container with ID starting with be66a41289eeb62fb5e31c9775902619fd00db89a0473dc041b7e633f82ca3f6 not found: ID does not exist" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.895976 4745 scope.go:117] "RemoveContainer" containerID="f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448" Dec 08 00:11:30 crc kubenswrapper[4745]: E1208 00:11:30.896445 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\": container with ID starting with f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448 not found: ID does not exist" containerID="f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448" Dec 08 00:11:30 crc kubenswrapper[4745]: I1208 00:11:30.896480 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448"} 
err="failed to get container status \"f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\": rpc error: code = NotFound desc = could not find container \"f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448\": container with ID starting with f47ccd2ddf6e27ce04aa0c55436fbc7f9e3c8b687f0b037710d36bb6b9c85448 not found: ID does not exist" Dec 08 00:11:31 crc kubenswrapper[4745]: E1208 00:11:31.428578 4745 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="3.2s" Dec 08 00:11:33 crc kubenswrapper[4745]: E1208 00:11:33.170723 4745 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.201:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:33 crc kubenswrapper[4745]: I1208 00:11:33.171372 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:33 crc kubenswrapper[4745]: W1208 00:11:33.204274 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-610ae9bce8d8e210d4d7046c93d6915e354c980acd7259f3386cd6505ae1698f WatchSource:0}: Error finding container 610ae9bce8d8e210d4d7046c93d6915e354c980acd7259f3386cd6505ae1698f: Status 404 returned error can't find the container with id 610ae9bce8d8e210d4d7046c93d6915e354c980acd7259f3386cd6505ae1698f Dec 08 00:11:33 crc kubenswrapper[4745]: E1208 00:11:33.208615 4745 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.201:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187f1505b4fdeff4 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-08 00:11:33.208084468 +0000 UTC m=+248.637290808,LastTimestamp:2025-12-08 00:11:33.208084468 +0000 UTC m=+248.637290808,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 08 00:11:33 crc kubenswrapper[4745]: I1208 00:11:33.760645 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"d58bc6ecfa86c1d86b536a33d69315b018e0b94ffd12ec7e6577eea1bf1149d0"} Dec 08 00:11:33 crc kubenswrapper[4745]: I1208 00:11:33.761491 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" 
event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"610ae9bce8d8e210d4d7046c93d6915e354c980acd7259f3386cd6505ae1698f"} Dec 08 00:11:33 crc kubenswrapper[4745]: I1208 00:11:33.762469 4745 status_manager.go:851] "Failed to get status for pod" podUID="5ae5f185-66f5-42c0-8a72-355923a94e40" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:33 crc kubenswrapper[4745]: E1208 00:11:33.762524 4745 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.201:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:11:33 crc kubenswrapper[4745]: I1208 00:11:33.762870 4745 status_manager.go:851] "Failed to get status for pod" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" pod="openshift-marketplace/certified-operators-k66hs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-k66hs\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:34 crc kubenswrapper[4745]: E1208 00:11:34.630499 4745 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="6.4s" Dec 08 00:11:34 crc kubenswrapper[4745]: I1208 00:11:34.886598 4745 status_manager.go:851] "Failed to get status for pod" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" pod="openshift-marketplace/certified-operators-k66hs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-k66hs\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:34 crc kubenswrapper[4745]: I1208 00:11:34.886993 4745 status_manager.go:851] "Failed to get status for pod" podUID="5ae5f185-66f5-42c0-8a72-355923a94e40" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:40 crc kubenswrapper[4745]: I1208 00:11:40.882341 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:40 crc kubenswrapper[4745]: I1208 00:11:40.883570 4745 status_manager.go:851] "Failed to get status for pod" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" pod="openshift-marketplace/certified-operators-k66hs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-k66hs\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:40 crc kubenswrapper[4745]: I1208 00:11:40.884208 4745 status_manager.go:851] "Failed to get status for pod" podUID="5ae5f185-66f5-42c0-8a72-355923a94e40" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:40 crc kubenswrapper[4745]: I1208 00:11:40.901398 4745 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f085d3aa-7b24-4491-9503-81796e0b68d8" Dec 08 00:11:40 crc kubenswrapper[4745]: I1208 00:11:40.901429 4745 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f085d3aa-7b24-4491-9503-81796e0b68d8" Dec 08 00:11:40 crc kubenswrapper[4745]: E1208 00:11:40.901863 4745 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:40 crc kubenswrapper[4745]: I1208 00:11:40.902575 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:41 crc kubenswrapper[4745]: E1208 00:11:41.032025 4745 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="7s" Dec 08 00:11:41 crc kubenswrapper[4745]: I1208 00:11:41.812630 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 08 00:11:41 crc kubenswrapper[4745]: I1208 00:11:41.812686 4745 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb" exitCode=1 Dec 08 00:11:41 crc kubenswrapper[4745]: I1208 00:11:41.812748 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb"} Dec 08 00:11:41 crc kubenswrapper[4745]: I1208 00:11:41.813288 4745 scope.go:117] "RemoveContainer" containerID="019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb" Dec 08 00:11:41 crc kubenswrapper[4745]: I1208 00:11:41.814358 4745 status_manager.go:851] "Failed to get status for pod" podUID="5ae5f185-66f5-42c0-8a72-355923a94e40" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:41 crc 
kubenswrapper[4745]: I1208 00:11:41.815072 4745 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:41 crc kubenswrapper[4745]: I1208 00:11:41.815502 4745 status_manager.go:851] "Failed to get status for pod" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" pod="openshift-marketplace/certified-operators-k66hs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-k66hs\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:41 crc kubenswrapper[4745]: I1208 00:11:41.816987 4745 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="63478e39b23de7033563666b978cc0e02b951891b3bd4d31a7102954bd3941d5" exitCode=0 Dec 08 00:11:41 crc kubenswrapper[4745]: I1208 00:11:41.817073 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"63478e39b23de7033563666b978cc0e02b951891b3bd4d31a7102954bd3941d5"} Dec 08 00:11:41 crc kubenswrapper[4745]: I1208 00:11:41.817175 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"75fbfd24c5e9fbacbaad4c134d5d7fce74dbafd3d8cc53a2ffcc51a64abd6ff3"} Dec 08 00:11:41 crc kubenswrapper[4745]: I1208 00:11:41.817636 4745 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f085d3aa-7b24-4491-9503-81796e0b68d8" Dec 08 00:11:41 crc kubenswrapper[4745]: I1208 00:11:41.817669 4745 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f085d3aa-7b24-4491-9503-81796e0b68d8" Dec 08 00:11:41 crc kubenswrapper[4745]: I1208 00:11:41.818168 4745 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:41 crc kubenswrapper[4745]: E1208 00:11:41.818191 4745 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:41 crc kubenswrapper[4745]: I1208 00:11:41.818650 4745 status_manager.go:851] "Failed to get status for pod" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" pod="openshift-marketplace/certified-operators-k66hs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-k66hs\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:41 crc kubenswrapper[4745]: I1208 00:11:41.819040 4745 status_manager.go:851] "Failed to get status for pod" podUID="5ae5f185-66f5-42c0-8a72-355923a94e40" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 08 00:11:42 crc kubenswrapper[4745]: I1208 00:11:42.828223 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 08 00:11:42 crc kubenswrapper[4745]: I1208 00:11:42.828688 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1d7eb60b5b77531d94037cdaf8e72be6025e1f379b7bac71d5b5f6f42657ed4d"} Dec 08 00:11:42 crc kubenswrapper[4745]: I1208 00:11:42.832612 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b1a9309b205437ed6f6c4bdf878d2326fdd1944f9768efa07f72082060043bd5"} Dec 08 00:11:42 crc kubenswrapper[4745]: I1208 00:11:42.832669 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"e56d968b7405b1731392169a6fc4b3c98c1c6ac70d957a8395f789265bfa2a41"} Dec 08 00:11:42 crc kubenswrapper[4745]: I1208 00:11:42.832684 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8d90347e183aa85dfe653f1c0a845acaf79b044d4b59e71e51bc5945f92c09fd"} Dec 08 00:11:42 crc kubenswrapper[4745]: I1208 00:11:42.832692 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b7da785776fe0885f0f5d0b68a2a2f0989328fcbaeeb8308522e545b7edaaf55"} Dec 08 00:11:43 crc kubenswrapper[4745]: I1208 00:11:43.285214 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:11:43 crc kubenswrapper[4745]: I1208 00:11:43.285657 4745 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 08 00:11:43 crc kubenswrapper[4745]: I1208 00:11:43.285789 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 08 00:11:43 crc kubenswrapper[4745]: I1208 00:11:43.843575 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ca980975eb5a104ee6b01281e782064697d609aa5f27cf74e37a7b7e95121524"} Dec 08 00:11:43 crc kubenswrapper[4745]: I1208 00:11:43.844333 4745 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f085d3aa-7b24-4491-9503-81796e0b68d8" Dec 08 00:11:43 crc kubenswrapper[4745]: 
I1208 00:11:43.844363 4745 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f085d3aa-7b24-4491-9503-81796e0b68d8" Dec 08 00:11:44 crc kubenswrapper[4745]: I1208 00:11:44.166129 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:11:45 crc kubenswrapper[4745]: I1208 00:11:45.771478 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" podUID="6a3eaabd-8f61-487b-83f8-e458dfa24673" containerName="oauth-openshift" containerID="cri-o://9bde71d17b77e17a96d05d22d288eea8309a5daf825947cdf5e0a6968baf17ea" gracePeriod=15 Dec 08 00:11:45 crc kubenswrapper[4745]: I1208 00:11:45.903277 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:45 crc kubenswrapper[4745]: I1208 00:11:45.903329 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:45 crc kubenswrapper[4745]: I1208 00:11:45.908873 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.180095 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.194820 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-idp-0-file-data\") pod \"6a3eaabd-8f61-487b-83f8-e458dfa24673\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.194874 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-trusted-ca-bundle\") pod \"6a3eaabd-8f61-487b-83f8-e458dfa24673\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.194902 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-audit-policies\") pod \"6a3eaabd-8f61-487b-83f8-e458dfa24673\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.194953 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-cliconfig\") pod \"6a3eaabd-8f61-487b-83f8-e458dfa24673\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.195003 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-login\") pod \"6a3eaabd-8f61-487b-83f8-e458dfa24673\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.195052 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-service-ca\") pod \"6a3eaabd-8f61-487b-83f8-e458dfa24673\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.195081 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-serving-cert\") pod \"6a3eaabd-8f61-487b-83f8-e458dfa24673\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.195139 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-provider-selection\") pod \"6a3eaabd-8f61-487b-83f8-e458dfa24673\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.195166 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6a3eaabd-8f61-487b-83f8-e458dfa24673-audit-dir\") pod \"6a3eaabd-8f61-487b-83f8-e458dfa24673\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.195199 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-ocp-branding-template\") pod \"6a3eaabd-8f61-487b-83f8-e458dfa24673\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.195224 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-error\") pod \"6a3eaabd-8f61-487b-83f8-e458dfa24673\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.195249 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-session\") pod \"6a3eaabd-8f61-487b-83f8-e458dfa24673\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.195277 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mfqw\" (UniqueName: \"kubernetes.io/projected/6a3eaabd-8f61-487b-83f8-e458dfa24673-kube-api-access-8mfqw\") pod \"6a3eaabd-8f61-487b-83f8-e458dfa24673\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.195309 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-router-certs\") pod \"6a3eaabd-8f61-487b-83f8-e458dfa24673\" (UID: \"6a3eaabd-8f61-487b-83f8-e458dfa24673\") " Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.195698 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6a3eaabd-8f61-487b-83f8-e458dfa24673-audit-dir" (OuterVolumeSpecName: 
"audit-dir") pod "6a3eaabd-8f61-487b-83f8-e458dfa24673" (UID: "6a3eaabd-8f61-487b-83f8-e458dfa24673"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.195823 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "6a3eaabd-8f61-487b-83f8-e458dfa24673" (UID: "6a3eaabd-8f61-487b-83f8-e458dfa24673"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.196097 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "6a3eaabd-8f61-487b-83f8-e458dfa24673" (UID: "6a3eaabd-8f61-487b-83f8-e458dfa24673"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.196239 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "6a3eaabd-8f61-487b-83f8-e458dfa24673" (UID: "6a3eaabd-8f61-487b-83f8-e458dfa24673"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.197572 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "6a3eaabd-8f61-487b-83f8-e458dfa24673" (UID: "6a3eaabd-8f61-487b-83f8-e458dfa24673"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.202890 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "6a3eaabd-8f61-487b-83f8-e458dfa24673" (UID: "6a3eaabd-8f61-487b-83f8-e458dfa24673"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.203818 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "6a3eaabd-8f61-487b-83f8-e458dfa24673" (UID: "6a3eaabd-8f61-487b-83f8-e458dfa24673"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.208733 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "6a3eaabd-8f61-487b-83f8-e458dfa24673" (UID: "6a3eaabd-8f61-487b-83f8-e458dfa24673"). 
InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.210215 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "6a3eaabd-8f61-487b-83f8-e458dfa24673" (UID: "6a3eaabd-8f61-487b-83f8-e458dfa24673"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.212410 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a3eaabd-8f61-487b-83f8-e458dfa24673-kube-api-access-8mfqw" (OuterVolumeSpecName: "kube-api-access-8mfqw") pod "6a3eaabd-8f61-487b-83f8-e458dfa24673" (UID: "6a3eaabd-8f61-487b-83f8-e458dfa24673"). InnerVolumeSpecName "kube-api-access-8mfqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.213121 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "6a3eaabd-8f61-487b-83f8-e458dfa24673" (UID: "6a3eaabd-8f61-487b-83f8-e458dfa24673"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.213570 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "6a3eaabd-8f61-487b-83f8-e458dfa24673" (UID: "6a3eaabd-8f61-487b-83f8-e458dfa24673"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.215119 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "6a3eaabd-8f61-487b-83f8-e458dfa24673" (UID: "6a3eaabd-8f61-487b-83f8-e458dfa24673"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.216670 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "6a3eaabd-8f61-487b-83f8-e458dfa24673" (UID: "6a3eaabd-8f61-487b-83f8-e458dfa24673"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.296652 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.297142 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.297307 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.297482 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mfqw\" (UniqueName: \"kubernetes.io/projected/6a3eaabd-8f61-487b-83f8-e458dfa24673-kube-api-access-8mfqw\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.297638 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.297785 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.297913 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.298079 4745 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.298225 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.298349 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.298470 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.298653 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.298807 4745 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6a3eaabd-8f61-487b-83f8-e458dfa24673-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.299008 4745 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6a3eaabd-8f61-487b-83f8-e458dfa24673-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.866296 4745 generic.go:334] "Generic (PLEG): container finished" podID="6a3eaabd-8f61-487b-83f8-e458dfa24673" containerID="9bde71d17b77e17a96d05d22d288eea8309a5daf825947cdf5e0a6968baf17ea" exitCode=0 Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.866366 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" event={"ID":"6a3eaabd-8f61-487b-83f8-e458dfa24673","Type":"ContainerDied","Data":"9bde71d17b77e17a96d05d22d288eea8309a5daf825947cdf5e0a6968baf17ea"} Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.866410 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" event={"ID":"6a3eaabd-8f61-487b-83f8-e458dfa24673","Type":"ContainerDied","Data":"6ed6e806ce9423f107e21e9f420439947ec4e51d9346634c316400fd6a4589bb"} Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.866439 4745 scope.go:117] "RemoveContainer" containerID="9bde71d17b77e17a96d05d22d288eea8309a5daf825947cdf5e0a6968baf17ea" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.867096 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-b9kth" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.909990 4745 scope.go:117] "RemoveContainer" containerID="9bde71d17b77e17a96d05d22d288eea8309a5daf825947cdf5e0a6968baf17ea" Dec 08 00:11:46 crc kubenswrapper[4745]: E1208 00:11:46.910625 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bde71d17b77e17a96d05d22d288eea8309a5daf825947cdf5e0a6968baf17ea\": container with ID starting with 9bde71d17b77e17a96d05d22d288eea8309a5daf825947cdf5e0a6968baf17ea not found: ID does not exist" containerID="9bde71d17b77e17a96d05d22d288eea8309a5daf825947cdf5e0a6968baf17ea" Dec 08 00:11:46 crc kubenswrapper[4745]: I1208 00:11:46.910661 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bde71d17b77e17a96d05d22d288eea8309a5daf825947cdf5e0a6968baf17ea"} err="failed to get container status \"9bde71d17b77e17a96d05d22d288eea8309a5daf825947cdf5e0a6968baf17ea\": rpc error: code = NotFound desc = could not find container \"9bde71d17b77e17a96d05d22d288eea8309a5daf825947cdf5e0a6968baf17ea\": container with ID starting with 9bde71d17b77e17a96d05d22d288eea8309a5daf825947cdf5e0a6968baf17ea not found: ID does not exist" Dec 08 00:11:48 crc kubenswrapper[4745]: I1208 00:11:48.855963 4745 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:48 crc kubenswrapper[4745]: I1208 00:11:48.878623 4745 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f085d3aa-7b24-4491-9503-81796e0b68d8" Dec 08 00:11:48 crc kubenswrapper[4745]: I1208 00:11:48.878672 4745 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f085d3aa-7b24-4491-9503-81796e0b68d8" Dec 08 00:11:48 crc kubenswrapper[4745]: I1208 00:11:48.878914 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:48 crc kubenswrapper[4745]: I1208 00:11:48.892314 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:11:48 crc kubenswrapper[4745]: I1208 00:11:48.895148 4745 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c63f7bf6-f83b-4ddb-8540-f63803b642dc" Dec 08 00:11:49 crc kubenswrapper[4745]: I1208 00:11:49.885124 4745 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f085d3aa-7b24-4491-9503-81796e0b68d8" Dec 08 00:11:49 crc kubenswrapper[4745]: I1208 00:11:49.885455 4745 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f085d3aa-7b24-4491-9503-81796e0b68d8" Dec 08 00:11:50 crc kubenswrapper[4745]: I1208 00:11:50.891526 4745 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f085d3aa-7b24-4491-9503-81796e0b68d8" Dec 08 00:11:50 crc kubenswrapper[4745]: I1208 00:11:50.891579 4745 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f085d3aa-7b24-4491-9503-81796e0b68d8" Dec 08 00:11:53 crc kubenswrapper[4745]: I1208 00:11:53.285872 4745 patch_prober.go:28] interesting pod/kube-controller-manager-crc 
container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 08 00:11:53 crc kubenswrapper[4745]: I1208 00:11:53.286289 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 08 00:11:54 crc kubenswrapper[4745]: I1208 00:11:54.903075 4745 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c63f7bf6-f83b-4ddb-8540-f63803b642dc" Dec 08 00:11:59 crc kubenswrapper[4745]: I1208 00:11:59.716484 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 08 00:12:00 crc kubenswrapper[4745]: I1208 00:12:00.045677 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 08 00:12:00 crc kubenswrapper[4745]: I1208 00:12:00.106920 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 08 00:12:00 crc kubenswrapper[4745]: I1208 00:12:00.123748 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 08 00:12:00 crc kubenswrapper[4745]: I1208 00:12:00.283628 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 08 00:12:00 crc kubenswrapper[4745]: I1208 00:12:00.319604 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 08 00:12:00 crc kubenswrapper[4745]: I1208 00:12:00.820912 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 08 00:12:00 crc kubenswrapper[4745]: I1208 00:12:00.838956 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 08 00:12:00 crc kubenswrapper[4745]: I1208 00:12:00.957250 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 08 00:12:01 crc kubenswrapper[4745]: I1208 00:12:01.046572 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 08 00:12:01 crc kubenswrapper[4745]: I1208 00:12:01.153720 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 08 00:12:01 crc kubenswrapper[4745]: I1208 00:12:01.158866 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 08 00:12:01 crc kubenswrapper[4745]: I1208 00:12:01.229580 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 08 00:12:01 crc kubenswrapper[4745]: I1208 00:12:01.231166 4745 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console"/"openshift-service-ca.crt" Dec 08 00:12:01 crc kubenswrapper[4745]: I1208 00:12:01.288091 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 08 00:12:01 crc kubenswrapper[4745]: I1208 00:12:01.407628 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 08 00:12:01 crc kubenswrapper[4745]: I1208 00:12:01.512311 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 08 00:12:01 crc kubenswrapper[4745]: I1208 00:12:01.574619 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 08 00:12:01 crc kubenswrapper[4745]: I1208 00:12:01.740890 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 08 00:12:01 crc kubenswrapper[4745]: I1208 00:12:01.751092 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 08 00:12:01 crc kubenswrapper[4745]: I1208 00:12:01.788027 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 08 00:12:02 crc kubenswrapper[4745]: I1208 00:12:02.048793 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 08 00:12:02 crc kubenswrapper[4745]: I1208 00:12:02.052270 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 08 00:12:02 crc kubenswrapper[4745]: I1208 00:12:02.156466 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 08 00:12:02 crc kubenswrapper[4745]: I1208 00:12:02.211306 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 08 00:12:02 crc kubenswrapper[4745]: I1208 00:12:02.288638 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 08 00:12:02 crc kubenswrapper[4745]: I1208 00:12:02.335739 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 08 00:12:02 crc kubenswrapper[4745]: I1208 00:12:02.379905 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 08 00:12:02 crc kubenswrapper[4745]: I1208 00:12:02.513279 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 08 00:12:02 crc kubenswrapper[4745]: I1208 00:12:02.642543 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 08 00:12:02 crc kubenswrapper[4745]: I1208 00:12:02.659858 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 08 00:12:02 crc kubenswrapper[4745]: I1208 00:12:02.684872 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 08 00:12:02 crc kubenswrapper[4745]: I1208 00:12:02.732632 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 08 
00:12:02 crc kubenswrapper[4745]: I1208 00:12:02.906052 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 08 00:12:02 crc kubenswrapper[4745]: I1208 00:12:02.912606 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 08 00:12:03 crc kubenswrapper[4745]: I1208 00:12:03.107365 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 08 00:12:03 crc kubenswrapper[4745]: I1208 00:12:03.284886 4745 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 08 00:12:03 crc kubenswrapper[4745]: I1208 00:12:03.284970 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 08 00:12:03 crc kubenswrapper[4745]: I1208 00:12:03.285041 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:12:03 crc kubenswrapper[4745]: I1208 00:12:03.285597 4745 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"1d7eb60b5b77531d94037cdaf8e72be6025e1f379b7bac71d5b5f6f42657ed4d"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Dec 08 00:12:03 crc kubenswrapper[4745]: I1208 00:12:03.285703 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://1d7eb60b5b77531d94037cdaf8e72be6025e1f379b7bac71d5b5f6f42657ed4d" gracePeriod=30 Dec 08 00:12:03 crc kubenswrapper[4745]: I1208 00:12:03.295612 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 08 00:12:03 crc kubenswrapper[4745]: I1208 00:12:03.307137 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 08 00:12:03 crc kubenswrapper[4745]: I1208 00:12:03.460062 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 08 00:12:03 crc kubenswrapper[4745]: I1208 00:12:03.496289 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 08 00:12:03 crc kubenswrapper[4745]: I1208 00:12:03.557455 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 08 00:12:03 crc kubenswrapper[4745]: I1208 00:12:03.590422 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 08 00:12:03 crc kubenswrapper[4745]: I1208 00:12:03.623724 4745 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 08 00:12:03 crc kubenswrapper[4745]: I1208 00:12:03.818856 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 08 00:12:03 crc kubenswrapper[4745]: I1208 00:12:03.896080 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 08 00:12:04 crc kubenswrapper[4745]: I1208 00:12:04.055819 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 08 00:12:04 crc kubenswrapper[4745]: I1208 00:12:04.116302 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 08 00:12:04 crc kubenswrapper[4745]: I1208 00:12:04.245124 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 08 00:12:04 crc kubenswrapper[4745]: I1208 00:12:04.342147 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 08 00:12:04 crc kubenswrapper[4745]: I1208 00:12:04.379031 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 08 00:12:04 crc kubenswrapper[4745]: I1208 00:12:04.413509 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 08 00:12:04 crc kubenswrapper[4745]: I1208 00:12:04.420118 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 08 00:12:04 crc kubenswrapper[4745]: I1208 00:12:04.542624 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 08 00:12:04 crc kubenswrapper[4745]: I1208 00:12:04.552050 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 08 00:12:04 crc kubenswrapper[4745]: I1208 00:12:04.564849 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 08 00:12:04 crc kubenswrapper[4745]: I1208 00:12:04.592783 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 08 00:12:04 crc kubenswrapper[4745]: I1208 00:12:04.606754 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 08 00:12:04 crc kubenswrapper[4745]: I1208 00:12:04.642839 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 08 00:12:04 crc kubenswrapper[4745]: I1208 00:12:04.669278 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 08 00:12:04 crc kubenswrapper[4745]: I1208 00:12:04.944590 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 08 00:12:04 crc kubenswrapper[4745]: I1208 00:12:04.960258 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 08 00:12:05 crc kubenswrapper[4745]: I1208 00:12:05.020590 4745 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 08 00:12:05 crc kubenswrapper[4745]: I1208 00:12:05.180182 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 08 00:12:05 crc kubenswrapper[4745]: I1208 00:12:05.337835 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 08 00:12:05 crc kubenswrapper[4745]: I1208 00:12:05.365356 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 08 00:12:05 crc kubenswrapper[4745]: I1208 00:12:05.374695 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 08 00:12:05 crc kubenswrapper[4745]: I1208 00:12:05.462691 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 08 00:12:05 crc kubenswrapper[4745]: I1208 00:12:05.484530 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 08 00:12:05 crc kubenswrapper[4745]: I1208 00:12:05.601246 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 08 00:12:05 crc kubenswrapper[4745]: I1208 00:12:05.601705 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 08 00:12:05 crc kubenswrapper[4745]: I1208 00:12:05.601853 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 08 00:12:05 crc kubenswrapper[4745]: I1208 00:12:05.617574 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 08 00:12:05 crc kubenswrapper[4745]: I1208 00:12:05.727212 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 08 00:12:05 crc kubenswrapper[4745]: I1208 00:12:05.814674 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 08 00:12:05 crc kubenswrapper[4745]: I1208 00:12:05.920628 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 08 00:12:05 crc kubenswrapper[4745]: I1208 00:12:05.980961 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 08 00:12:06 crc kubenswrapper[4745]: I1208 00:12:06.018350 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 08 00:12:06 crc kubenswrapper[4745]: I1208 00:12:06.070115 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 08 00:12:06 crc kubenswrapper[4745]: I1208 00:12:06.083168 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 08 00:12:06 crc kubenswrapper[4745]: I1208 00:12:06.147003 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 08 00:12:06 crc kubenswrapper[4745]: I1208 00:12:06.322095 4745 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-network-operator"/"iptables-alerter-script" Dec 08 00:12:06 crc kubenswrapper[4745]: I1208 00:12:06.352137 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 08 00:12:06 crc kubenswrapper[4745]: I1208 00:12:06.369256 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 08 00:12:06 crc kubenswrapper[4745]: I1208 00:12:06.386744 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 08 00:12:06 crc kubenswrapper[4745]: I1208 00:12:06.395854 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 08 00:12:06 crc kubenswrapper[4745]: I1208 00:12:06.419281 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 08 00:12:06 crc kubenswrapper[4745]: I1208 00:12:06.483721 4745 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 08 00:12:06 crc kubenswrapper[4745]: I1208 00:12:06.518585 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 08 00:12:06 crc kubenswrapper[4745]: I1208 00:12:06.687610 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 08 00:12:06 crc kubenswrapper[4745]: I1208 00:12:06.835515 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.109716 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.160582 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.183260 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.201742 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.210688 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.248156 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.295460 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.305227 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.320787 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.363908 4745 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.525502 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.534828 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.588918 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.655493 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.681263 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.735512 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.819535 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 08 00:12:07 crc kubenswrapper[4745]: I1208 00:12:07.942461 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.010271 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.091283 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.094352 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.117030 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.238838 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.388623 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.462261 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.556049 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.567667 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.715367 4745 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.783789 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.791281 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.792216 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.814723 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.868955 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.890614 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.971262 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 08 00:12:08 crc kubenswrapper[4745]: I1208 00:12:08.975786 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.136001 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.177852 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.201234 4745 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.220563 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.270417 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.323877 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.334976 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.340084 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.358500 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.381502 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.381850 4745 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.458217 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.474009 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.545803 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.623641 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.699104 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.718298 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.767673 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.841721 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.849246 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.982668 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 08 00:12:09 crc kubenswrapper[4745]: I1208 00:12:09.996452 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.040391 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.042233 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.055442 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.196451 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.198675 4745 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.209905 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.228671 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.268459 4745 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.283737 4745 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.393179 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.454596 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.495283 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.509654 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.524073 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.524834 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.704539 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.727428 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.735569 4745 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.878727 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.934758 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 08 00:12:10 crc kubenswrapper[4745]: I1208 00:12:10.989394 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.025819 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.051469 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.114545 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.119099 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.149682 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.155435 4745 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.207835 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.209672 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.295080 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.376877 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.533467 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.541139 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.560030 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.688156 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.726648 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.783519 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.817994 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.868667 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 08 00:12:11 crc kubenswrapper[4745]: I1208 00:12:11.950998 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.088063 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.134636 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.150600 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.162278 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.177916 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 08 00:12:12 crc 
kubenswrapper[4745]: I1208 00:12:12.187783 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.194912 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.296630 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.403479 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.428494 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.536190 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.549895 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.551927 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.570529 4745 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.572403 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.575150 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-b9kth"] Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.575219 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.581381 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.592587 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=24.592565753 podStartE2EDuration="24.592565753s" podCreationTimestamp="2025-12-08 00:11:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:12:12.589805537 +0000 UTC m=+288.019011847" watchObservedRunningTime="2025-12-08 00:12:12.592565753 +0000 UTC m=+288.021772063" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.682706 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.734680 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.739742 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 08 
00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.863239 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.883567 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.894993 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a3eaabd-8f61-487b-83f8-e458dfa24673" path="/var/lib/kubelet/pods/6a3eaabd-8f61-487b-83f8-e458dfa24673/volumes" Dec 08 00:12:12 crc kubenswrapper[4745]: I1208 00:12:12.943861 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 08 00:12:13 crc kubenswrapper[4745]: I1208 00:12:13.088658 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 08 00:12:13 crc kubenswrapper[4745]: I1208 00:12:13.152401 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 08 00:12:13 crc kubenswrapper[4745]: I1208 00:12:13.175479 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 08 00:12:13 crc kubenswrapper[4745]: I1208 00:12:13.220651 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 08 00:12:13 crc kubenswrapper[4745]: I1208 00:12:13.281156 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 08 00:12:13 crc kubenswrapper[4745]: I1208 00:12:13.412851 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 08 00:12:13 crc kubenswrapper[4745]: I1208 00:12:13.577650 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 08 00:12:13 crc kubenswrapper[4745]: I1208 00:12:13.632549 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 08 00:12:13 crc kubenswrapper[4745]: I1208 00:12:13.705649 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 08 00:12:13 crc kubenswrapper[4745]: I1208 00:12:13.720554 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 08 00:12:13 crc kubenswrapper[4745]: I1208 00:12:13.944764 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 08 00:12:13 crc kubenswrapper[4745]: I1208 00:12:13.998927 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 08 00:12:14 crc kubenswrapper[4745]: I1208 00:12:14.135838 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 08 00:12:14 crc kubenswrapper[4745]: I1208 00:12:14.173692 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 08 00:12:14 crc kubenswrapper[4745]: I1208 00:12:14.201552 4745 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 08 00:12:14 crc kubenswrapper[4745]: I1208 00:12:14.219160 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 08 00:12:14 crc kubenswrapper[4745]: I1208 00:12:14.509946 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 08 00:12:14 crc kubenswrapper[4745]: I1208 00:12:14.580079 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 08 00:12:14 crc kubenswrapper[4745]: I1208 00:12:14.607170 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 08 00:12:14 crc kubenswrapper[4745]: I1208 00:12:14.675869 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 08 00:12:14 crc kubenswrapper[4745]: I1208 00:12:14.749071 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 08 00:12:14 crc kubenswrapper[4745]: I1208 00:12:14.819012 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 08 00:12:14 crc kubenswrapper[4745]: I1208 00:12:14.926493 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 08 00:12:15 crc kubenswrapper[4745]: I1208 00:12:15.095599 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 08 00:12:15 crc kubenswrapper[4745]: I1208 00:12:15.802321 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 08 00:12:16 crc kubenswrapper[4745]: I1208 00:12:16.290348 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 08 00:12:16 crc kubenswrapper[4745]: I1208 00:12:16.989996 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 08 00:12:17 crc kubenswrapper[4745]: I1208 00:12:17.162701 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.650881 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6f96647944-wtsk5"] Dec 08 00:12:21 crc kubenswrapper[4745]: E1208 00:12:21.652036 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a3eaabd-8f61-487b-83f8-e458dfa24673" containerName="oauth-openshift" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.652055 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a3eaabd-8f61-487b-83f8-e458dfa24673" containerName="oauth-openshift" Dec 08 00:12:21 crc kubenswrapper[4745]: E1208 00:12:21.652080 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ae5f185-66f5-42c0-8a72-355923a94e40" containerName="installer" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.652090 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ae5f185-66f5-42c0-8a72-355923a94e40" containerName="installer" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.652243 4745 
memory_manager.go:354] "RemoveStaleState removing state" podUID="5ae5f185-66f5-42c0-8a72-355923a94e40" containerName="installer" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.652266 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a3eaabd-8f61-487b-83f8-e458dfa24673" containerName="oauth-openshift" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.652851 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.655461 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.656022 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.656700 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.656908 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.656999 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.657107 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.657002 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.657827 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.658896 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.659124 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.659128 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.660107 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.664900 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6f96647944-wtsk5"] Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.665810 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.666192 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.673501 4745 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.690194 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-user-template-login\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.690255 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.690284 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsztf\" (UniqueName: \"kubernetes.io/projected/f5242b42-1db2-415e-be7b-1bdada6ca943-kube-api-access-rsztf\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.690328 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.690363 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f5242b42-1db2-415e-be7b-1bdada6ca943-audit-dir\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.690389 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.690416 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-service-ca\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.690455 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-router-certs\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.690544 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.690607 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-session\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.690748 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-user-template-error\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.690784 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.690843 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f5242b42-1db2-415e-be7b-1bdada6ca943-audit-policies\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.690897 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.792492 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.792534 4745 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.792556 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsztf\" (UniqueName: \"kubernetes.io/projected/f5242b42-1db2-415e-be7b-1bdada6ca943-kube-api-access-rsztf\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.792579 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f5242b42-1db2-415e-be7b-1bdada6ca943-audit-dir\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.792598 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.792616 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-router-certs\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.792638 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-service-ca\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.792657 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.792675 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-session\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.792687 4745 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f5242b42-1db2-415e-be7b-1bdada6ca943-audit-dir\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.792724 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-user-template-error\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.792754 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.792789 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f5242b42-1db2-415e-be7b-1bdada6ca943-audit-policies\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.792814 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.792843 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-user-template-login\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.793519 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.793552 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f5242b42-1db2-415e-be7b-1bdada6ca943-audit-policies\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.794486 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-service-ca\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.795059 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.798713 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-user-template-error\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.798978 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.798999 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-user-template-login\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.799324 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-session\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.799393 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-router-certs\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.800119 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.800832 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.801613 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f5242b42-1db2-415e-be7b-1bdada6ca943-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.814917 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsztf\" (UniqueName: \"kubernetes.io/projected/f5242b42-1db2-415e-be7b-1bdada6ca943-kube-api-access-rsztf\") pod \"oauth-openshift-6f96647944-wtsk5\" (UID: \"f5242b42-1db2-415e-be7b-1bdada6ca943\") " pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:21 crc kubenswrapper[4745]: I1208 00:12:21.977374 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:22 crc kubenswrapper[4745]: I1208 00:12:22.185087 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6f96647944-wtsk5"] Dec 08 00:12:22 crc kubenswrapper[4745]: I1208 00:12:22.486679 4745 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 08 00:12:22 crc kubenswrapper[4745]: I1208 00:12:22.487419 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://d58bc6ecfa86c1d86b536a33d69315b018e0b94ffd12ec7e6577eea1bf1149d0" gracePeriod=5 Dec 08 00:12:23 crc kubenswrapper[4745]: I1208 00:12:23.071652 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" event={"ID":"f5242b42-1db2-415e-be7b-1bdada6ca943","Type":"ContainerStarted","Data":"4e876167016b9e644dabad0aaa90547a9cbe9b42b07dd43d44e9c9d23e227dfc"} Dec 08 00:12:23 crc kubenswrapper[4745]: I1208 00:12:23.071750 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" event={"ID":"f5242b42-1db2-415e-be7b-1bdada6ca943","Type":"ContainerStarted","Data":"1fe354cb85a9bb3c9eb4ace6a4a3266750e7351624911f59ca747298b37ed4cc"} Dec 08 00:12:23 crc kubenswrapper[4745]: I1208 00:12:23.072066 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:23 crc kubenswrapper[4745]: I1208 00:12:23.082186 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" Dec 08 00:12:23 crc kubenswrapper[4745]: I1208 00:12:23.100347 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6f96647944-wtsk5" podStartSLOduration=63.100329316 podStartE2EDuration="1m3.100329316s" podCreationTimestamp="2025-12-08 00:11:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:12:23.098263692 +0000 UTC m=+298.527470052" watchObservedRunningTime="2025-12-08 00:12:23.100329316 +0000 UTC m=+298.529535616" Dec 08 00:12:24 crc kubenswrapper[4745]: I1208 00:12:24.714651 4745 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.101722 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.102127 4745 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="d58bc6ecfa86c1d86b536a33d69315b018e0b94ffd12ec7e6577eea1bf1149d0" exitCode=137 Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.629302 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.629813 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.684332 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.684439 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.684484 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.684587 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.684654 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.684785 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.684835 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.684832 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.685013 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.685396 4745 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.685435 4745 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.685466 4745 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.685493 4745 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.699332 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.786234 4745 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:28 crc kubenswrapper[4745]: I1208 00:12:28.899396 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 08 00:12:29 crc kubenswrapper[4745]: I1208 00:12:29.110198 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 08 00:12:29 crc kubenswrapper[4745]: I1208 00:12:29.110304 4745 scope.go:117] "RemoveContainer" containerID="d58bc6ecfa86c1d86b536a33d69315b018e0b94ffd12ec7e6577eea1bf1149d0" Dec 08 00:12:29 crc kubenswrapper[4745]: I1208 00:12:29.110365 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 08 00:12:34 crc kubenswrapper[4745]: I1208 00:12:34.159628 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 08 00:12:34 crc kubenswrapper[4745]: I1208 00:12:34.162823 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 08 00:12:34 crc kubenswrapper[4745]: I1208 00:12:34.162889 4745 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="1d7eb60b5b77531d94037cdaf8e72be6025e1f379b7bac71d5b5f6f42657ed4d" exitCode=137 Dec 08 00:12:34 crc kubenswrapper[4745]: I1208 00:12:34.162952 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"1d7eb60b5b77531d94037cdaf8e72be6025e1f379b7bac71d5b5f6f42657ed4d"} Dec 08 00:12:34 crc kubenswrapper[4745]: I1208 00:12:34.162983 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4e3da1897000d41962ebd9346fac82717ba10501014db789eb9c074fe5c8d970"} Dec 08 00:12:34 crc kubenswrapper[4745]: I1208 00:12:34.163022 4745 scope.go:117] "RemoveContainer" containerID="019122c2444d8265d8dbd2e793bc915ec7fb3ce0e1ec2707c104ca9b6c6a70cb" Dec 08 00:12:34 crc kubenswrapper[4745]: I1208 00:12:34.165360 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:12:35 crc kubenswrapper[4745]: I1208 00:12:35.170773 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 08 00:12:43 crc kubenswrapper[4745]: I1208 00:12:43.285438 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:12:43 crc kubenswrapper[4745]: I1208 00:12:43.292621 4745 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:12:44 crc kubenswrapper[4745]: I1208 00:12:44.169871 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 08 00:12:50 crc kubenswrapper[4745]: I1208 00:12:50.645466 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7xchq"] Dec 08 00:12:50 crc kubenswrapper[4745]: I1208 00:12:50.646094 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd"] Dec 08 00:12:50 crc kubenswrapper[4745]: I1208 00:12:50.646273 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" podUID="623b96d9-884a-4dea-a6a9-301f384d8666" containerName="route-controller-manager" containerID="cri-o://49dd054dc178c4f9773ba203591976075c6c24ce3725895d163e8f7ffae686e5" gracePeriod=30 Dec 08 00:12:50 crc kubenswrapper[4745]: I1208 00:12:50.646487 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" podUID="82d71418-4a49-437a-8429-1f0569d205b0" containerName="controller-manager" containerID="cri-o://fbbb504f347f733254f6f29984d236b9a34b1d97590023922dfe7b1203b2d0cb" gracePeriod=30 Dec 08 00:12:50 crc kubenswrapper[4745]: I1208 00:12:50.838619 4745 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-7xchq container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Dec 08 00:12:50 crc kubenswrapper[4745]: I1208 00:12:50.838920 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" podUID="82d71418-4a49-437a-8429-1f0569d205b0" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Dec 08 00:12:50 crc kubenswrapper[4745]: I1208 00:12:50.925235 4745 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-sqqdd container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Dec 08 00:12:50 crc kubenswrapper[4745]: I1208 00:12:50.925317 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" podUID="623b96d9-884a-4dea-a6a9-301f384d8666" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.267526 4745 generic.go:334] "Generic (PLEG): container finished" podID="623b96d9-884a-4dea-a6a9-301f384d8666" containerID="49dd054dc178c4f9773ba203591976075c6c24ce3725895d163e8f7ffae686e5" exitCode=0 Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.267610 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" 
event={"ID":"623b96d9-884a-4dea-a6a9-301f384d8666","Type":"ContainerDied","Data":"49dd054dc178c4f9773ba203591976075c6c24ce3725895d163e8f7ffae686e5"} Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.269634 4745 generic.go:334] "Generic (PLEG): container finished" podID="82d71418-4a49-437a-8429-1f0569d205b0" containerID="fbbb504f347f733254f6f29984d236b9a34b1d97590023922dfe7b1203b2d0cb" exitCode=0 Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.269681 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" event={"ID":"82d71418-4a49-437a-8429-1f0569d205b0","Type":"ContainerDied","Data":"fbbb504f347f733254f6f29984d236b9a34b1d97590023922dfe7b1203b2d0cb"} Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.548057 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.561484 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.589686 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/623b96d9-884a-4dea-a6a9-301f384d8666-client-ca\") pod \"623b96d9-884a-4dea-a6a9-301f384d8666\" (UID: \"623b96d9-884a-4dea-a6a9-301f384d8666\") " Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.589747 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/623b96d9-884a-4dea-a6a9-301f384d8666-config\") pod \"623b96d9-884a-4dea-a6a9-301f384d8666\" (UID: \"623b96d9-884a-4dea-a6a9-301f384d8666\") " Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.589769 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-config\") pod \"82d71418-4a49-437a-8429-1f0569d205b0\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.589823 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82d71418-4a49-437a-8429-1f0569d205b0-serving-cert\") pod \"82d71418-4a49-437a-8429-1f0569d205b0\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.589877 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-client-ca\") pod \"82d71418-4a49-437a-8429-1f0569d205b0\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.590198 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2dnz\" (UniqueName: \"kubernetes.io/projected/623b96d9-884a-4dea-a6a9-301f384d8666-kube-api-access-v2dnz\") pod \"623b96d9-884a-4dea-a6a9-301f384d8666\" (UID: \"623b96d9-884a-4dea-a6a9-301f384d8666\") " Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.590277 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-proxy-ca-bundles\") pod 
\"82d71418-4a49-437a-8429-1f0569d205b0\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.590302 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nx6jb\" (UniqueName: \"kubernetes.io/projected/82d71418-4a49-437a-8429-1f0569d205b0-kube-api-access-nx6jb\") pod \"82d71418-4a49-437a-8429-1f0569d205b0\" (UID: \"82d71418-4a49-437a-8429-1f0569d205b0\") " Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.590330 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/623b96d9-884a-4dea-a6a9-301f384d8666-serving-cert\") pod \"623b96d9-884a-4dea-a6a9-301f384d8666\" (UID: \"623b96d9-884a-4dea-a6a9-301f384d8666\") " Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.590425 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/623b96d9-884a-4dea-a6a9-301f384d8666-client-ca" (OuterVolumeSpecName: "client-ca") pod "623b96d9-884a-4dea-a6a9-301f384d8666" (UID: "623b96d9-884a-4dea-a6a9-301f384d8666"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.590728 4745 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/623b96d9-884a-4dea-a6a9-301f384d8666-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.590895 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-client-ca" (OuterVolumeSpecName: "client-ca") pod "82d71418-4a49-437a-8429-1f0569d205b0" (UID: "82d71418-4a49-437a-8429-1f0569d205b0"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.591368 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/623b96d9-884a-4dea-a6a9-301f384d8666-config" (OuterVolumeSpecName: "config") pod "623b96d9-884a-4dea-a6a9-301f384d8666" (UID: "623b96d9-884a-4dea-a6a9-301f384d8666"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.591538 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "82d71418-4a49-437a-8429-1f0569d205b0" (UID: "82d71418-4a49-437a-8429-1f0569d205b0"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.592500 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-config" (OuterVolumeSpecName: "config") pod "82d71418-4a49-437a-8429-1f0569d205b0" (UID: "82d71418-4a49-437a-8429-1f0569d205b0"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.596834 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82d71418-4a49-437a-8429-1f0569d205b0-kube-api-access-nx6jb" (OuterVolumeSpecName: "kube-api-access-nx6jb") pod "82d71418-4a49-437a-8429-1f0569d205b0" (UID: "82d71418-4a49-437a-8429-1f0569d205b0"). InnerVolumeSpecName "kube-api-access-nx6jb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.598395 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/623b96d9-884a-4dea-a6a9-301f384d8666-kube-api-access-v2dnz" (OuterVolumeSpecName: "kube-api-access-v2dnz") pod "623b96d9-884a-4dea-a6a9-301f384d8666" (UID: "623b96d9-884a-4dea-a6a9-301f384d8666"). InnerVolumeSpecName "kube-api-access-v2dnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.599267 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/623b96d9-884a-4dea-a6a9-301f384d8666-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "623b96d9-884a-4dea-a6a9-301f384d8666" (UID: "623b96d9-884a-4dea-a6a9-301f384d8666"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.605012 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82d71418-4a49-437a-8429-1f0569d205b0-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "82d71418-4a49-437a-8429-1f0569d205b0" (UID: "82d71418-4a49-437a-8429-1f0569d205b0"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.691874 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2dnz\" (UniqueName: \"kubernetes.io/projected/623b96d9-884a-4dea-a6a9-301f384d8666-kube-api-access-v2dnz\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.692229 4745 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.692239 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nx6jb\" (UniqueName: \"kubernetes.io/projected/82d71418-4a49-437a-8429-1f0569d205b0-kube-api-access-nx6jb\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.692249 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/623b96d9-884a-4dea-a6a9-301f384d8666-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.692258 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/623b96d9-884a-4dea-a6a9-301f384d8666-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.692269 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.692284 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82d71418-4a49-437a-8429-1f0569d205b0-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:51 crc kubenswrapper[4745]: I1208 00:12:51.692329 4745 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/82d71418-4a49-437a-8429-1f0569d205b0-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.276461 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" event={"ID":"623b96d9-884a-4dea-a6a9-301f384d8666","Type":"ContainerDied","Data":"beacd63ec355550991859f6c01c643f7e14e4ac1f1fe90f6a7c433f37d45f335"} Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.276486 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.276512 4745 scope.go:117] "RemoveContainer" containerID="49dd054dc178c4f9773ba203591976075c6c24ce3725895d163e8f7ffae686e5" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.278462 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" event={"ID":"82d71418-4a49-437a-8429-1f0569d205b0","Type":"ContainerDied","Data":"1a840b2fa2d6d7184ddcf31ceb99131abfe8022d3c454b3a4e1c8da720bd496c"} Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.278542 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7xchq" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.297409 4745 scope.go:117] "RemoveContainer" containerID="fbbb504f347f733254f6f29984d236b9a34b1d97590023922dfe7b1203b2d0cb" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.312554 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd"] Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.319133 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-sqqdd"] Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.322741 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7xchq"] Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.326525 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7xchq"] Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.460858 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.461186 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.671458 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc"] Dec 08 00:12:52 crc kubenswrapper[4745]: E1208 00:12:52.671814 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82d71418-4a49-437a-8429-1f0569d205b0" containerName="controller-manager" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.671845 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="82d71418-4a49-437a-8429-1f0569d205b0" containerName="controller-manager" Dec 08 00:12:52 crc kubenswrapper[4745]: E1208 00:12:52.671865 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="623b96d9-884a-4dea-a6a9-301f384d8666" containerName="route-controller-manager" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.671875 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="623b96d9-884a-4dea-a6a9-301f384d8666" containerName="route-controller-manager" Dec 08 00:12:52 crc kubenswrapper[4745]: E1208 00:12:52.671894 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.671904 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.672057 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.672075 4745 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="82d71418-4a49-437a-8429-1f0569d205b0" containerName="controller-manager" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.672084 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="623b96d9-884a-4dea-a6a9-301f384d8666" containerName="route-controller-manager" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.672622 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.674318 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.675137 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.676058 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.676046 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-66b645584c-vqw5t"] Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.676504 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.676876 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.677485 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.677659 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.679517 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.679705 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.679969 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.680183 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.680532 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.682053 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.689797 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.702615 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-config\") pod \"controller-manager-66b645584c-vqw5t\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.702672 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-proxy-ca-bundles\") pod \"controller-manager-66b645584c-vqw5t\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.702701 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czstx\" (UniqueName: \"kubernetes.io/projected/5e2f7754-e515-4e1d-8aa4-279569aac0e6-kube-api-access-czstx\") pod \"controller-manager-66b645584c-vqw5t\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.702752 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e2f7754-e515-4e1d-8aa4-279569aac0e6-serving-cert\") pod \"controller-manager-66b645584c-vqw5t\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.702780 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-serving-cert\") pod \"route-controller-manager-64bdfb6ffc-l75mc\" (UID: \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\") " pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.702812 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-client-ca\") pod \"controller-manager-66b645584c-vqw5t\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.702837 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-config\") pod \"route-controller-manager-64bdfb6ffc-l75mc\" (UID: \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\") " pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.702854 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-client-ca\") pod \"route-controller-manager-64bdfb6ffc-l75mc\" (UID: \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\") " pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.702883 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4fhg\" (UniqueName: 
\"kubernetes.io/projected/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-kube-api-access-w4fhg\") pod \"route-controller-manager-64bdfb6ffc-l75mc\" (UID: \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\") " pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.720077 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66b645584c-vqw5t"] Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.734611 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc"] Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.804412 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-proxy-ca-bundles\") pod \"controller-manager-66b645584c-vqw5t\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.804458 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czstx\" (UniqueName: \"kubernetes.io/projected/5e2f7754-e515-4e1d-8aa4-279569aac0e6-kube-api-access-czstx\") pod \"controller-manager-66b645584c-vqw5t\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.804517 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e2f7754-e515-4e1d-8aa4-279569aac0e6-serving-cert\") pod \"controller-manager-66b645584c-vqw5t\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.804541 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-serving-cert\") pod \"route-controller-manager-64bdfb6ffc-l75mc\" (UID: \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\") " pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.804564 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-client-ca\") pod \"controller-manager-66b645584c-vqw5t\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.804584 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-config\") pod \"route-controller-manager-64bdfb6ffc-l75mc\" (UID: \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\") " pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.804604 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-client-ca\") pod \"route-controller-manager-64bdfb6ffc-l75mc\" (UID: \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\") 
" pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.804629 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4fhg\" (UniqueName: \"kubernetes.io/projected/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-kube-api-access-w4fhg\") pod \"route-controller-manager-64bdfb6ffc-l75mc\" (UID: \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\") " pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.804692 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-config\") pod \"controller-manager-66b645584c-vqw5t\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.805802 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-proxy-ca-bundles\") pod \"controller-manager-66b645584c-vqw5t\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.806129 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-client-ca\") pod \"route-controller-manager-64bdfb6ffc-l75mc\" (UID: \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\") " pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.806138 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-client-ca\") pod \"controller-manager-66b645584c-vqw5t\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.806295 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-config\") pod \"controller-manager-66b645584c-vqw5t\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.806576 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-config\") pod \"route-controller-manager-64bdfb6ffc-l75mc\" (UID: \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\") " pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.812576 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-serving-cert\") pod \"route-controller-manager-64bdfb6ffc-l75mc\" (UID: \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\") " pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.812589 4745 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e2f7754-e515-4e1d-8aa4-279569aac0e6-serving-cert\") pod \"controller-manager-66b645584c-vqw5t\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.822659 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4fhg\" (UniqueName: \"kubernetes.io/projected/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-kube-api-access-w4fhg\") pod \"route-controller-manager-64bdfb6ffc-l75mc\" (UID: \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\") " pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.825653 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czstx\" (UniqueName: \"kubernetes.io/projected/5e2f7754-e515-4e1d-8aa4-279569aac0e6-kube-api-access-czstx\") pod \"controller-manager-66b645584c-vqw5t\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.892739 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="623b96d9-884a-4dea-a6a9-301f384d8666" path="/var/lib/kubelet/pods/623b96d9-884a-4dea-a6a9-301f384d8666/volumes" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.893805 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82d71418-4a49-437a-8429-1f0569d205b0" path="/var/lib/kubelet/pods/82d71418-4a49-437a-8429-1f0569d205b0/volumes" Dec 08 00:12:52 crc kubenswrapper[4745]: I1208 00:12:52.993428 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:12:53 crc kubenswrapper[4745]: I1208 00:12:53.011292 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:53 crc kubenswrapper[4745]: I1208 00:12:53.203960 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc"] Dec 08 00:12:53 crc kubenswrapper[4745]: I1208 00:12:53.244361 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66b645584c-vqw5t"] Dec 08 00:12:53 crc kubenswrapper[4745]: W1208 00:12:53.251631 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5e2f7754_e515_4e1d_8aa4_279569aac0e6.slice/crio-114df2a133f268cc4fe956f23c1a7b448e3af4c8dd9452dc0a0020062cbbaecf WatchSource:0}: Error finding container 114df2a133f268cc4fe956f23c1a7b448e3af4c8dd9452dc0a0020062cbbaecf: Status 404 returned error can't find the container with id 114df2a133f268cc4fe956f23c1a7b448e3af4c8dd9452dc0a0020062cbbaecf Dec 08 00:12:53 crc kubenswrapper[4745]: I1208 00:12:53.294428 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" event={"ID":"5e2f7754-e515-4e1d-8aa4-279569aac0e6","Type":"ContainerStarted","Data":"114df2a133f268cc4fe956f23c1a7b448e3af4c8dd9452dc0a0020062cbbaecf"} Dec 08 00:12:53 crc kubenswrapper[4745]: I1208 00:12:53.298693 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" event={"ID":"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2","Type":"ContainerStarted","Data":"d62ff8d63702e11ff18e496ca7f323e7df295e35cd9f48f90ea63a10590330ea"} Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.079707 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k66hs"] Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.080479 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-k66hs" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" containerName="registry-server" containerID="cri-o://6f9f9c26f63630472ec5cecfaba5904811af5155b4124d6ebb48c86b2e93fc0a" gracePeriod=30 Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.089900 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fkwkz"] Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.090254 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fkwkz" podUID="76ddbfcb-3447-4c68-a36e-fc310ab2f75b" containerName="registry-server" containerID="cri-o://35e94280b5fd53bf2fd868802813441e346ba6d8eb4f85c4f7c6fbc1aa3fa1a0" gracePeriod=30 Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.096290 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hq629"] Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.096549 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-hq629" podUID="dc05c216-95a6-4890-9fc7-7eb70233e104" containerName="marketplace-operator" containerID="cri-o://481e1c2cf6e74344ab4bf9e42a5a609e2c6c9c358a0fa26c2cd461a07fd3d28d" gracePeriod=30 Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.114759 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-twwmd"] 
Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.116269 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-twwmd" podUID="289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" containerName="registry-server" containerID="cri-o://81e157b01d1359df8bb200f2530fd9c6bc84f75c466f2a85cc130b9a982f2a60" gracePeriod=30 Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.121366 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l6kk8"] Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.121622 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-l6kk8" podUID="e9bdca4d-dada-48b2-b9aa-43dd3801eb93" containerName="registry-server" containerID="cri-o://becf97841eb9b27d95aeb007c43ce5f9e462cca7f408bb68eecd5ad0f740ab25" gracePeriod=30 Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.142570 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-sz9zf"] Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.144678 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.163433 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-sz9zf"] Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.264525 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c5de8b9e-5510-4473-bd7b-e105e794b3be-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-sz9zf\" (UID: \"c5de8b9e-5510-4473-bd7b-e105e794b3be\") " pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.264900 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmbrw\" (UniqueName: \"kubernetes.io/projected/c5de8b9e-5510-4473-bd7b-e105e794b3be-kube-api-access-jmbrw\") pod \"marketplace-operator-79b997595-sz9zf\" (UID: \"c5de8b9e-5510-4473-bd7b-e105e794b3be\") " pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.265064 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c5de8b9e-5510-4473-bd7b-e105e794b3be-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-sz9zf\" (UID: \"c5de8b9e-5510-4473-bd7b-e105e794b3be\") " pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.316295 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" event={"ID":"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2","Type":"ContainerStarted","Data":"908b7ba52d7a827e666500f661a188de76498a6ab6ff6396d5922f7094a4147f"} Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.317679 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" event={"ID":"5e2f7754-e515-4e1d-8aa4-279569aac0e6","Type":"ContainerStarted","Data":"7a972abaf78fb8b29f3374d2e187554d8eea5d9bf43235a57ab93607d0cda933"} Dec 08 00:12:56 
crc kubenswrapper[4745]: I1208 00:12:56.336456 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" podStartSLOduration=6.336431602 podStartE2EDuration="6.336431602s" podCreationTimestamp="2025-12-08 00:12:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:12:56.332071873 +0000 UTC m=+331.761278183" watchObservedRunningTime="2025-12-08 00:12:56.336431602 +0000 UTC m=+331.765637912" Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.365891 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c5de8b9e-5510-4473-bd7b-e105e794b3be-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-sz9zf\" (UID: \"c5de8b9e-5510-4473-bd7b-e105e794b3be\") " pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.366077 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmbrw\" (UniqueName: \"kubernetes.io/projected/c5de8b9e-5510-4473-bd7b-e105e794b3be-kube-api-access-jmbrw\") pod \"marketplace-operator-79b997595-sz9zf\" (UID: \"c5de8b9e-5510-4473-bd7b-e105e794b3be\") " pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.366105 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c5de8b9e-5510-4473-bd7b-e105e794b3be-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-sz9zf\" (UID: \"c5de8b9e-5510-4473-bd7b-e105e794b3be\") " pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.368368 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c5de8b9e-5510-4473-bd7b-e105e794b3be-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-sz9zf\" (UID: \"c5de8b9e-5510-4473-bd7b-e105e794b3be\") " pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.371957 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/c5de8b9e-5510-4473-bd7b-e105e794b3be-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-sz9zf\" (UID: \"c5de8b9e-5510-4473-bd7b-e105e794b3be\") " pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.387865 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmbrw\" (UniqueName: \"kubernetes.io/projected/c5de8b9e-5510-4473-bd7b-e105e794b3be-kube-api-access-jmbrw\") pod \"marketplace-operator-79b997595-sz9zf\" (UID: \"c5de8b9e-5510-4473-bd7b-e105e794b3be\") " pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.470746 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" Dec 08 00:12:56 crc kubenswrapper[4745]: I1208 00:12:56.706998 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-sz9zf"] Dec 08 00:12:56 crc kubenswrapper[4745]: W1208 00:12:56.716108 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5de8b9e_5510_4473_bd7b_e105e794b3be.slice/crio-84cbf486750e53d57a6be1ce3e3f2a22190369a26495c9933d2222582ed0d8eb WatchSource:0}: Error finding container 84cbf486750e53d57a6be1ce3e3f2a22190369a26495c9933d2222582ed0d8eb: Status 404 returned error can't find the container with id 84cbf486750e53d57a6be1ce3e3f2a22190369a26495c9933d2222582ed0d8eb Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.326552 4745 generic.go:334] "Generic (PLEG): container finished" podID="76ddbfcb-3447-4c68-a36e-fc310ab2f75b" containerID="35e94280b5fd53bf2fd868802813441e346ba6d8eb4f85c4f7c6fbc1aa3fa1a0" exitCode=0 Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.326632 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fkwkz" event={"ID":"76ddbfcb-3447-4c68-a36e-fc310ab2f75b","Type":"ContainerDied","Data":"35e94280b5fd53bf2fd868802813441e346ba6d8eb4f85c4f7c6fbc1aa3fa1a0"} Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.328457 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" event={"ID":"c5de8b9e-5510-4473-bd7b-e105e794b3be","Type":"ContainerStarted","Data":"f8588709ff7a79d5f1611a27bf8fdc5c0986dcee342c388fddf1ff116bd23dc6"} Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.328482 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" event={"ID":"c5de8b9e-5510-4473-bd7b-e105e794b3be","Type":"ContainerStarted","Data":"84cbf486750e53d57a6be1ce3e3f2a22190369a26495c9933d2222582ed0d8eb"} Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.328709 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.329995 4745 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-sz9zf container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.61:8080/healthz\": dial tcp 10.217.0.61:8080: connect: connection refused" start-of-body= Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.330056 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" podUID="c5de8b9e-5510-4473-bd7b-e105e794b3be" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.61:8080/healthz\": dial tcp 10.217.0.61:8080: connect: connection refused" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.331407 4745 generic.go:334] "Generic (PLEG): container finished" podID="289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" containerID="81e157b01d1359df8bb200f2530fd9c6bc84f75c466f2a85cc130b9a982f2a60" exitCode=0 Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.331463 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-twwmd" 
event={"ID":"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0","Type":"ContainerDied","Data":"81e157b01d1359df8bb200f2530fd9c6bc84f75c466f2a85cc130b9a982f2a60"} Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.335177 4745 generic.go:334] "Generic (PLEG): container finished" podID="dc05c216-95a6-4890-9fc7-7eb70233e104" containerID="481e1c2cf6e74344ab4bf9e42a5a609e2c6c9c358a0fa26c2cd461a07fd3d28d" exitCode=0 Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.335256 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hq629" event={"ID":"dc05c216-95a6-4890-9fc7-7eb70233e104","Type":"ContainerDied","Data":"481e1c2cf6e74344ab4bf9e42a5a609e2c6c9c358a0fa26c2cd461a07fd3d28d"} Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.339185 4745 generic.go:334] "Generic (PLEG): container finished" podID="e9bdca4d-dada-48b2-b9aa-43dd3801eb93" containerID="becf97841eb9b27d95aeb007c43ce5f9e462cca7f408bb68eecd5ad0f740ab25" exitCode=0 Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.339260 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6kk8" event={"ID":"e9bdca4d-dada-48b2-b9aa-43dd3801eb93","Type":"ContainerDied","Data":"becf97841eb9b27d95aeb007c43ce5f9e462cca7f408bb68eecd5ad0f740ab25"} Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.343062 4745 generic.go:334] "Generic (PLEG): container finished" podID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" containerID="6f9f9c26f63630472ec5cecfaba5904811af5155b4124d6ebb48c86b2e93fc0a" exitCode=0 Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.344082 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k66hs" event={"ID":"dd084f85-b44a-4016-9fbd-5f051c4e9a53","Type":"ContainerDied","Data":"6f9f9c26f63630472ec5cecfaba5904811af5155b4124d6ebb48c86b2e93fc0a"} Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.344124 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.344271 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.345468 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" podStartSLOduration=1.345456092 podStartE2EDuration="1.345456092s" podCreationTimestamp="2025-12-08 00:12:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:12:57.343730571 +0000 UTC m=+332.772936861" watchObservedRunningTime="2025-12-08 00:12:57.345456092 +0000 UTC m=+332.774662392" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.355971 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.356232 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.362030 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" 
podStartSLOduration=7.362016333 podStartE2EDuration="7.362016333s" podCreationTimestamp="2025-12-08 00:12:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:12:57.359368984 +0000 UTC m=+332.788575324" watchObservedRunningTime="2025-12-08 00:12:57.362016333 +0000 UTC m=+332.791222633" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.685278 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.746276 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.753393 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.772807 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.783865 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-catalog-content\") pod \"e9bdca4d-dada-48b2-b9aa-43dd3801eb93\" (UID: \"e9bdca4d-dada-48b2-b9aa-43dd3801eb93\") " Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.783980 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75bj7\" (UniqueName: \"kubernetes.io/projected/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-kube-api-access-75bj7\") pod \"e9bdca4d-dada-48b2-b9aa-43dd3801eb93\" (UID: \"e9bdca4d-dada-48b2-b9aa-43dd3801eb93\") " Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.784010 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6xq7\" (UniqueName: \"kubernetes.io/projected/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-kube-api-access-q6xq7\") pod \"76ddbfcb-3447-4c68-a36e-fc310ab2f75b\" (UID: \"76ddbfcb-3447-4c68-a36e-fc310ab2f75b\") " Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.784027 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd084f85-b44a-4016-9fbd-5f051c4e9a53-catalog-content\") pod \"dd084f85-b44a-4016-9fbd-5f051c4e9a53\" (UID: \"dd084f85-b44a-4016-9fbd-5f051c4e9a53\") " Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.784083 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqsvw\" (UniqueName: \"kubernetes.io/projected/dd084f85-b44a-4016-9fbd-5f051c4e9a53-kube-api-access-dqsvw\") pod \"dd084f85-b44a-4016-9fbd-5f051c4e9a53\" (UID: \"dd084f85-b44a-4016-9fbd-5f051c4e9a53\") " Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.784108 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-utilities\") pod \"e9bdca4d-dada-48b2-b9aa-43dd3801eb93\" (UID: \"e9bdca4d-dada-48b2-b9aa-43dd3801eb93\") " Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.784129 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/dd084f85-b44a-4016-9fbd-5f051c4e9a53-utilities\") pod \"dd084f85-b44a-4016-9fbd-5f051c4e9a53\" (UID: \"dd084f85-b44a-4016-9fbd-5f051c4e9a53\") " Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.784156 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-utilities\") pod \"76ddbfcb-3447-4c68-a36e-fc310ab2f75b\" (UID: \"76ddbfcb-3447-4c68-a36e-fc310ab2f75b\") " Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.784171 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-catalog-content\") pod \"76ddbfcb-3447-4c68-a36e-fc310ab2f75b\" (UID: \"76ddbfcb-3447-4c68-a36e-fc310ab2f75b\") " Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.785045 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-utilities" (OuterVolumeSpecName: "utilities") pod "e9bdca4d-dada-48b2-b9aa-43dd3801eb93" (UID: "e9bdca4d-dada-48b2-b9aa-43dd3801eb93"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.785372 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-utilities" (OuterVolumeSpecName: "utilities") pod "76ddbfcb-3447-4c68-a36e-fc310ab2f75b" (UID: "76ddbfcb-3447-4c68-a36e-fc310ab2f75b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.790969 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd084f85-b44a-4016-9fbd-5f051c4e9a53-kube-api-access-dqsvw" (OuterVolumeSpecName: "kube-api-access-dqsvw") pod "dd084f85-b44a-4016-9fbd-5f051c4e9a53" (UID: "dd084f85-b44a-4016-9fbd-5f051c4e9a53"). InnerVolumeSpecName "kube-api-access-dqsvw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.791227 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-kube-api-access-75bj7" (OuterVolumeSpecName: "kube-api-access-75bj7") pod "e9bdca4d-dada-48b2-b9aa-43dd3801eb93" (UID: "e9bdca4d-dada-48b2-b9aa-43dd3801eb93"). InnerVolumeSpecName "kube-api-access-75bj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.791686 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd084f85-b44a-4016-9fbd-5f051c4e9a53-utilities" (OuterVolumeSpecName: "utilities") pod "dd084f85-b44a-4016-9fbd-5f051c4e9a53" (UID: "dd084f85-b44a-4016-9fbd-5f051c4e9a53"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.792715 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-kube-api-access-q6xq7" (OuterVolumeSpecName: "kube-api-access-q6xq7") pod "76ddbfcb-3447-4c68-a36e-fc310ab2f75b" (UID: "76ddbfcb-3447-4c68-a36e-fc310ab2f75b"). InnerVolumeSpecName "kube-api-access-q6xq7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.792862 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hq629" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.855044 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "76ddbfcb-3447-4c68-a36e-fc310ab2f75b" (UID: "76ddbfcb-3447-4c68-a36e-fc310ab2f75b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.877496 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd084f85-b44a-4016-9fbd-5f051c4e9a53-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dd084f85-b44a-4016-9fbd-5f051c4e9a53" (UID: "dd084f85-b44a-4016-9fbd-5f051c4e9a53"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.885494 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlpw2\" (UniqueName: \"kubernetes.io/projected/dc05c216-95a6-4890-9fc7-7eb70233e104-kube-api-access-mlpw2\") pod \"dc05c216-95a6-4890-9fc7-7eb70233e104\" (UID: \"dc05c216-95a6-4890-9fc7-7eb70233e104\") " Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.885684 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/dc05c216-95a6-4890-9fc7-7eb70233e104-marketplace-operator-metrics\") pod \"dc05c216-95a6-4890-9fc7-7eb70233e104\" (UID: \"dc05c216-95a6-4890-9fc7-7eb70233e104\") " Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.885808 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dc05c216-95a6-4890-9fc7-7eb70233e104-marketplace-trusted-ca\") pod \"dc05c216-95a6-4890-9fc7-7eb70233e104\" (UID: \"dc05c216-95a6-4890-9fc7-7eb70233e104\") " Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.885910 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hk8gj\" (UniqueName: \"kubernetes.io/projected/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-kube-api-access-hk8gj\") pod \"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0\" (UID: \"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0\") " Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.886003 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-catalog-content\") pod \"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0\" (UID: \"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0\") " Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.886429 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-utilities\") pod \"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0\" (UID: \"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0\") " Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.886610 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/dc05c216-95a6-4890-9fc7-7eb70233e104-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "dc05c216-95a6-4890-9fc7-7eb70233e104" (UID: "dc05c216-95a6-4890-9fc7-7eb70233e104"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.886849 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75bj7\" (UniqueName: \"kubernetes.io/projected/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-kube-api-access-75bj7\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.886942 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6xq7\" (UniqueName: \"kubernetes.io/projected/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-kube-api-access-q6xq7\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.887006 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd084f85-b44a-4016-9fbd-5f051c4e9a53-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.887066 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqsvw\" (UniqueName: \"kubernetes.io/projected/dd084f85-b44a-4016-9fbd-5f051c4e9a53-kube-api-access-dqsvw\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.887132 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.887190 4745 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dc05c216-95a6-4890-9fc7-7eb70233e104-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.887245 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd084f85-b44a-4016-9fbd-5f051c4e9a53-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.887304 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.887359 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76ddbfcb-3447-4c68-a36e-fc310ab2f75b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.888146 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-kube-api-access-hk8gj" (OuterVolumeSpecName: "kube-api-access-hk8gj") pod "289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" (UID: "289a5ef6-f5f7-4225-9790-5cbc5c1bcee0"). InnerVolumeSpecName "kube-api-access-hk8gj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.888271 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-utilities" (OuterVolumeSpecName: "utilities") pod "289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" (UID: "289a5ef6-f5f7-4225-9790-5cbc5c1bcee0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.888548 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc05c216-95a6-4890-9fc7-7eb70233e104-kube-api-access-mlpw2" (OuterVolumeSpecName: "kube-api-access-mlpw2") pod "dc05c216-95a6-4890-9fc7-7eb70233e104" (UID: "dc05c216-95a6-4890-9fc7-7eb70233e104"). InnerVolumeSpecName "kube-api-access-mlpw2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.888595 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc05c216-95a6-4890-9fc7-7eb70233e104-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "dc05c216-95a6-4890-9fc7-7eb70233e104" (UID: "dc05c216-95a6-4890-9fc7-7eb70233e104"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.904710 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e9bdca4d-dada-48b2-b9aa-43dd3801eb93" (UID: "e9bdca4d-dada-48b2-b9aa-43dd3801eb93"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.914293 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" (UID: "289a5ef6-f5f7-4225-9790-5cbc5c1bcee0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.989653 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlpw2\" (UniqueName: \"kubernetes.io/projected/dc05c216-95a6-4890-9fc7-7eb70233e104-kube-api-access-mlpw2\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.989688 4745 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/dc05c216-95a6-4890-9fc7-7eb70233e104-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.989701 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hk8gj\" (UniqueName: \"kubernetes.io/projected/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-kube-api-access-hk8gj\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.989718 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.989730 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:57 crc kubenswrapper[4745]: I1208 00:12:57.989742 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9bdca4d-dada-48b2-b9aa-43dd3801eb93-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.351566 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-twwmd" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.353066 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-twwmd" event={"ID":"289a5ef6-f5f7-4225-9790-5cbc5c1bcee0","Type":"ContainerDied","Data":"09dd50cf9914e2abe80924985aab0597a35880dcbea7bc61a0fd8491c0334e2f"} Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.353123 4745 scope.go:117] "RemoveContainer" containerID="81e157b01d1359df8bb200f2530fd9c6bc84f75c466f2a85cc130b9a982f2a60" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.354837 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hq629" event={"ID":"dc05c216-95a6-4890-9fc7-7eb70233e104","Type":"ContainerDied","Data":"b76eac5192a7cf6307f223736ab49d6785a891b99768489f32f72398552b1dce"} Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.355179 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hq629" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.359091 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l6kk8" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.359086 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l6kk8" event={"ID":"e9bdca4d-dada-48b2-b9aa-43dd3801eb93","Type":"ContainerDied","Data":"b01cf916fcc3a3fbe808bd9b188289aea14c7339eb4f17fa5674560ec24f3684"} Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.362725 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k66hs" event={"ID":"dd084f85-b44a-4016-9fbd-5f051c4e9a53","Type":"ContainerDied","Data":"3a633854a59b7bee0fa14ea423e0439b9d65cb16a9ae01b283768b1695a04feb"} Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.362838 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k66hs" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.368121 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fkwkz" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.368653 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fkwkz" event={"ID":"76ddbfcb-3447-4c68-a36e-fc310ab2f75b","Type":"ContainerDied","Data":"12a80f2b21942907ef94adb6472bb6b527be87680f1356753d9785620d2212d9"} Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.372756 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-sz9zf" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.374148 4745 scope.go:117] "RemoveContainer" containerID="16ed8ecb4bfac314d95c0a3f759628b8c9eeb70d097eecd6e41a0b6de47e3d42" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.392830 4745 scope.go:117] "RemoveContainer" containerID="0a9d9e98ef256d82bba214b27758236c2abce043235d6525796fa241712c394b" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.432053 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hq629"] Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.432967 4745 scope.go:117] "RemoveContainer" containerID="481e1c2cf6e74344ab4bf9e42a5a609e2c6c9c358a0fa26c2cd461a07fd3d28d" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.434078 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hq629"] Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.454737 4745 scope.go:117] "RemoveContainer" containerID="becf97841eb9b27d95aeb007c43ce5f9e462cca7f408bb68eecd5ad0f740ab25" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.467369 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fkwkz"] Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.472309 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fkwkz"] Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.474230 4745 scope.go:117] "RemoveContainer" containerID="cfcede1292d44c175a8ff9d5196ea47cb2e05f3c8318b6c3fe19d2b29a198a9a" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.482692 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l6kk8"] Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.490628 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-marketplace/redhat-operators-l6kk8"] Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.494854 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k66hs"] Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.499506 4745 scope.go:117] "RemoveContainer" containerID="745825b08f28e38d8f78a18237ae44cb6716d9f47621c74d1d24a9c242df90d6" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.502153 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-k66hs"] Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.505210 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-twwmd"] Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.510467 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-twwmd"] Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.518225 4745 scope.go:117] "RemoveContainer" containerID="6f9f9c26f63630472ec5cecfaba5904811af5155b4124d6ebb48c86b2e93fc0a" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.539286 4745 scope.go:117] "RemoveContainer" containerID="fa634ace9d401fdae82d6c71fe9ec63135302d1a83b4787b634d4b2366defebd" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.558784 4745 scope.go:117] "RemoveContainer" containerID="11c53739ca58873a1ad1e093dabac6bbadc64ea79bf3ac71bdb1e72aa2f2fb77" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.571851 4745 scope.go:117] "RemoveContainer" containerID="35e94280b5fd53bf2fd868802813441e346ba6d8eb4f85c4f7c6fbc1aa3fa1a0" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.583032 4745 scope.go:117] "RemoveContainer" containerID="c8280e042ea76030f566c02fa88ef58d113f8ae3975ede86e7cc9cc0a9b4cc08" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.598735 4745 scope.go:117] "RemoveContainer" containerID="538af921cea7869e325ef7f220c9c5dcee99792e80c7ae061e99dd6365bc6882" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.893451 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" path="/var/lib/kubelet/pods/289a5ef6-f5f7-4225-9790-5cbc5c1bcee0/volumes" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.894710 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76ddbfcb-3447-4c68-a36e-fc310ab2f75b" path="/var/lib/kubelet/pods/76ddbfcb-3447-4c68-a36e-fc310ab2f75b/volumes" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.895334 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc05c216-95a6-4890-9fc7-7eb70233e104" path="/var/lib/kubelet/pods/dc05c216-95a6-4890-9fc7-7eb70233e104/volumes" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.895783 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" path="/var/lib/kubelet/pods/dd084f85-b44a-4016-9fbd-5f051c4e9a53/volumes" Dec 08 00:12:58 crc kubenswrapper[4745]: I1208 00:12:58.896353 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9bdca4d-dada-48b2-b9aa-43dd3801eb93" path="/var/lib/kubelet/pods/e9bdca4d-dada-48b2-b9aa-43dd3801eb93/volumes" Dec 08 00:13:00 crc kubenswrapper[4745]: I1208 00:13:00.559119 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-66b645584c-vqw5t"] Dec 08 00:13:00 crc kubenswrapper[4745]: I1208 00:13:00.559389 4745 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" podUID="5e2f7754-e515-4e1d-8aa4-279569aac0e6" containerName="controller-manager" containerID="cri-o://7a972abaf78fb8b29f3374d2e187554d8eea5d9bf43235a57ab93607d0cda933" gracePeriod=30 Dec 08 00:13:00 crc kubenswrapper[4745]: I1208 00:13:00.563905 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc"] Dec 08 00:13:00 crc kubenswrapper[4745]: I1208 00:13:00.564136 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" podUID="e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2" containerName="route-controller-manager" containerID="cri-o://908b7ba52d7a827e666500f661a188de76498a6ab6ff6396d5922f7094a4147f" gracePeriod=30 Dec 08 00:13:01 crc kubenswrapper[4745]: I1208 00:13:01.392245 4745 generic.go:334] "Generic (PLEG): container finished" podID="5e2f7754-e515-4e1d-8aa4-279569aac0e6" containerID="7a972abaf78fb8b29f3374d2e187554d8eea5d9bf43235a57ab93607d0cda933" exitCode=0 Dec 08 00:13:01 crc kubenswrapper[4745]: I1208 00:13:01.392472 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" event={"ID":"5e2f7754-e515-4e1d-8aa4-279569aac0e6","Type":"ContainerDied","Data":"7a972abaf78fb8b29f3374d2e187554d8eea5d9bf43235a57ab93607d0cda933"} Dec 08 00:13:01 crc kubenswrapper[4745]: I1208 00:13:01.394771 4745 generic.go:334] "Generic (PLEG): container finished" podID="e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2" containerID="908b7ba52d7a827e666500f661a188de76498a6ab6ff6396d5922f7094a4147f" exitCode=0 Dec 08 00:13:01 crc kubenswrapper[4745]: I1208 00:13:01.394827 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" event={"ID":"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2","Type":"ContainerDied","Data":"908b7ba52d7a827e666500f661a188de76498a6ab6ff6396d5922f7094a4147f"} Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.147055 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.178526 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2"] Dec 08 00:13:02 crc kubenswrapper[4745]: E1208 00:13:02.180316 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2" containerName="route-controller-manager" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180359 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2" containerName="route-controller-manager" Dec 08 00:13:02 crc kubenswrapper[4745]: E1208 00:13:02.180371 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" containerName="registry-server" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180378 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" containerName="registry-server" Dec 08 00:13:02 crc kubenswrapper[4745]: E1208 00:13:02.180386 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" containerName="extract-content" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180392 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" containerName="extract-content" Dec 08 00:13:02 crc kubenswrapper[4745]: E1208 00:13:02.180403 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc05c216-95a6-4890-9fc7-7eb70233e104" containerName="marketplace-operator" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180409 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc05c216-95a6-4890-9fc7-7eb70233e104" containerName="marketplace-operator" Dec 08 00:13:02 crc kubenswrapper[4745]: E1208 00:13:02.180434 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9bdca4d-dada-48b2-b9aa-43dd3801eb93" containerName="extract-utilities" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180440 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9bdca4d-dada-48b2-b9aa-43dd3801eb93" containerName="extract-utilities" Dec 08 00:13:02 crc kubenswrapper[4745]: E1208 00:13:02.180448 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" containerName="extract-utilities" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180454 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" containerName="extract-utilities" Dec 08 00:13:02 crc kubenswrapper[4745]: E1208 00:13:02.180463 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" containerName="extract-content" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180469 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" containerName="extract-content" Dec 08 00:13:02 crc kubenswrapper[4745]: E1208 00:13:02.180479 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" containerName="registry-server" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180484 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" containerName="registry-server" Dec 08 00:13:02 crc 
kubenswrapper[4745]: E1208 00:13:02.180492 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76ddbfcb-3447-4c68-a36e-fc310ab2f75b" containerName="extract-utilities" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180498 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="76ddbfcb-3447-4c68-a36e-fc310ab2f75b" containerName="extract-utilities" Dec 08 00:13:02 crc kubenswrapper[4745]: E1208 00:13:02.180504 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" containerName="extract-utilities" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180509 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" containerName="extract-utilities" Dec 08 00:13:02 crc kubenswrapper[4745]: E1208 00:13:02.180519 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76ddbfcb-3447-4c68-a36e-fc310ab2f75b" containerName="extract-content" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180525 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="76ddbfcb-3447-4c68-a36e-fc310ab2f75b" containerName="extract-content" Dec 08 00:13:02 crc kubenswrapper[4745]: E1208 00:13:02.180530 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76ddbfcb-3447-4c68-a36e-fc310ab2f75b" containerName="registry-server" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180537 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="76ddbfcb-3447-4c68-a36e-fc310ab2f75b" containerName="registry-server" Dec 08 00:13:02 crc kubenswrapper[4745]: E1208 00:13:02.180544 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9bdca4d-dada-48b2-b9aa-43dd3801eb93" containerName="registry-server" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180549 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9bdca4d-dada-48b2-b9aa-43dd3801eb93" containerName="registry-server" Dec 08 00:13:02 crc kubenswrapper[4745]: E1208 00:13:02.180557 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9bdca4d-dada-48b2-b9aa-43dd3801eb93" containerName="extract-content" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180562 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9bdca4d-dada-48b2-b9aa-43dd3801eb93" containerName="extract-content" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180670 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="289a5ef6-f5f7-4225-9790-5cbc5c1bcee0" containerName="registry-server" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180679 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9bdca4d-dada-48b2-b9aa-43dd3801eb93" containerName="registry-server" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180688 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd084f85-b44a-4016-9fbd-5f051c4e9a53" containerName="registry-server" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180696 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc05c216-95a6-4890-9fc7-7eb70233e104" containerName="marketplace-operator" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180703 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2" containerName="route-controller-manager" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.180714 4745 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="76ddbfcb-3447-4c68-a36e-fc310ab2f75b" containerName="registry-server" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.181086 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.194959 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2"] Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.250722 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.252408 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-client-ca\") pod \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\" (UID: \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\") " Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.252453 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-config\") pod \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\" (UID: \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\") " Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.252480 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-serving-cert\") pod \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\" (UID: \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\") " Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.252511 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4fhg\" (UniqueName: \"kubernetes.io/projected/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-kube-api-access-w4fhg\") pod \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\" (UID: \"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2\") " Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.252653 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-serving-cert\") pod \"route-controller-manager-6c9688944-6nbp2\" (UID: \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.252715 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-client-ca\") pod \"route-controller-manager-6c9688944-6nbp2\" (UID: \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.252779 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v584\" (UniqueName: \"kubernetes.io/projected/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-kube-api-access-5v584\") pod \"route-controller-manager-6c9688944-6nbp2\" (UID: \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.252801 4745 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-config\") pod \"route-controller-manager-6c9688944-6nbp2\" (UID: \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.253418 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-client-ca" (OuterVolumeSpecName: "client-ca") pod "e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2" (UID: "e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.253709 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-config" (OuterVolumeSpecName: "config") pod "e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2" (UID: "e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.258838 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2" (UID: "e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.258969 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-kube-api-access-w4fhg" (OuterVolumeSpecName: "kube-api-access-w4fhg") pod "e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2" (UID: "e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2"). InnerVolumeSpecName "kube-api-access-w4fhg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.353735 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e2f7754-e515-4e1d-8aa4-279569aac0e6-serving-cert\") pod \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.353831 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-client-ca\") pod \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.353853 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czstx\" (UniqueName: \"kubernetes.io/projected/5e2f7754-e515-4e1d-8aa4-279569aac0e6-kube-api-access-czstx\") pod \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.353879 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-config\") pod \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.353968 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-proxy-ca-bundles\") pod \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\" (UID: \"5e2f7754-e515-4e1d-8aa4-279569aac0e6\") " Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.354098 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-client-ca\") pod \"route-controller-manager-6c9688944-6nbp2\" (UID: \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.354132 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v584\" (UniqueName: \"kubernetes.io/projected/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-kube-api-access-5v584\") pod \"route-controller-manager-6c9688944-6nbp2\" (UID: \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.355003 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "5e2f7754-e515-4e1d-8aa4-279569aac0e6" (UID: "5e2f7754-e515-4e1d-8aa4-279569aac0e6"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.355173 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-client-ca\") pod \"route-controller-manager-6c9688944-6nbp2\" (UID: \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.355168 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-config" (OuterVolumeSpecName: "config") pod "5e2f7754-e515-4e1d-8aa4-279569aac0e6" (UID: "5e2f7754-e515-4e1d-8aa4-279569aac0e6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.355250 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-config\") pod \"route-controller-manager-6c9688944-6nbp2\" (UID: \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.355286 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-serving-cert\") pod \"route-controller-manager-6c9688944-6nbp2\" (UID: \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.355339 4745 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.355350 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.355360 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.355369 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.355434 4745 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.355445 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4fhg\" (UniqueName: \"kubernetes.io/projected/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2-kube-api-access-w4fhg\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.355577 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-config\") pod \"route-controller-manager-6c9688944-6nbp2\" (UID: \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.355651 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-client-ca" (OuterVolumeSpecName: "client-ca") pod "5e2f7754-e515-4e1d-8aa4-279569aac0e6" (UID: "5e2f7754-e515-4e1d-8aa4-279569aac0e6"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.356917 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e2f7754-e515-4e1d-8aa4-279569aac0e6-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5e2f7754-e515-4e1d-8aa4-279569aac0e6" (UID: "5e2f7754-e515-4e1d-8aa4-279569aac0e6"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.365292 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e2f7754-e515-4e1d-8aa4-279569aac0e6-kube-api-access-czstx" (OuterVolumeSpecName: "kube-api-access-czstx") pod "5e2f7754-e515-4e1d-8aa4-279569aac0e6" (UID: "5e2f7754-e515-4e1d-8aa4-279569aac0e6"). InnerVolumeSpecName "kube-api-access-czstx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.366432 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-serving-cert\") pod \"route-controller-manager-6c9688944-6nbp2\" (UID: \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.369401 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v584\" (UniqueName: \"kubernetes.io/projected/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-kube-api-access-5v584\") pod \"route-controller-manager-6c9688944-6nbp2\" (UID: \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.399804 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" event={"ID":"5e2f7754-e515-4e1d-8aa4-279569aac0e6","Type":"ContainerDied","Data":"114df2a133f268cc4fe956f23c1a7b448e3af4c8dd9452dc0a0020062cbbaecf"} Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.400204 4745 scope.go:117] "RemoveContainer" containerID="7a972abaf78fb8b29f3374d2e187554d8eea5d9bf43235a57ab93607d0cda933" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.399886 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-66b645584c-vqw5t" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.402048 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" event={"ID":"e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2","Type":"ContainerDied","Data":"d62ff8d63702e11ff18e496ca7f323e7df295e35cd9f48f90ea63a10590330ea"} Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.402106 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.420773 4745 scope.go:117] "RemoveContainer" containerID="908b7ba52d7a827e666500f661a188de76498a6ab6ff6396d5922f7094a4147f" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.431919 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-66b645584c-vqw5t"] Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.435854 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-66b645584c-vqw5t"] Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.443850 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc"] Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.447189 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64bdfb6ffc-l75mc"] Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.456258 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e2f7754-e515-4e1d-8aa4-279569aac0e6-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.456333 4745 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5e2f7754-e515-4e1d-8aa4-279569aac0e6-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.456350 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czstx\" (UniqueName: \"kubernetes.io/projected/5e2f7754-e515-4e1d-8aa4-279569aac0e6-kube-api-access-czstx\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.503223 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.926785 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e2f7754-e515-4e1d-8aa4-279569aac0e6" path="/var/lib/kubelet/pods/5e2f7754-e515-4e1d-8aa4-279569aac0e6/volumes" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.928368 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2" path="/var/lib/kubelet/pods/e6c3ba4a-54c4-4b3e-b4d3-3fb06f062df2/volumes" Dec 08 00:13:02 crc kubenswrapper[4745]: I1208 00:13:02.948781 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2"] Dec 08 00:13:02 crc kubenswrapper[4745]: W1208 00:13:02.961642 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbbeebe34_dfdb_49e9_9dd3_eb9291e77907.slice/crio-e6a10923c44e1497b3bd35e05469a8e6d8cabdd9ae1c25906125456febe8429b WatchSource:0}: Error finding container e6a10923c44e1497b3bd35e05469a8e6d8cabdd9ae1c25906125456febe8429b: Status 404 returned error can't find the container with id e6a10923c44e1497b3bd35e05469a8e6d8cabdd9ae1c25906125456febe8429b Dec 08 00:13:03 crc kubenswrapper[4745]: I1208 00:13:03.411370 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" event={"ID":"bbeebe34-dfdb-49e9-9dd3-eb9291e77907","Type":"ContainerStarted","Data":"e6a10923c44e1497b3bd35e05469a8e6d8cabdd9ae1c25906125456febe8429b"} Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.420200 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" event={"ID":"bbeebe34-dfdb-49e9-9dd3-eb9291e77907","Type":"ContainerStarted","Data":"a833f1f11450b46ca94eaeecf1a243285da91495cb02769bedcd1a431037b242"} Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.421661 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.441807 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" podStartSLOduration=4.44179107 podStartE2EDuration="4.44179107s" podCreationTimestamp="2025-12-08 00:13:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:13:04.437503372 +0000 UTC m=+339.866709702" watchObservedRunningTime="2025-12-08 00:13:04.44179107 +0000 UTC m=+339.870997370" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.456160 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.677733 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6"] Dec 08 00:13:04 crc kubenswrapper[4745]: E1208 00:13:04.677963 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e2f7754-e515-4e1d-8aa4-279569aac0e6" containerName="controller-manager" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.677977 4745 
state_mem.go:107] "Deleted CPUSet assignment" podUID="5e2f7754-e515-4e1d-8aa4-279569aac0e6" containerName="controller-manager" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.678121 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e2f7754-e515-4e1d-8aa4-279569aac0e6" containerName="controller-manager" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.678513 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.680028 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.681131 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.681625 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.682465 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.682695 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.685043 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.698468 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6"] Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.706071 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.800393 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a72f9842-b59f-4a69-a6a9-e111389a99f1-serving-cert\") pod \"controller-manager-65cc68ffb8-h4kt6\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.800427 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-client-ca\") pod \"controller-manager-65cc68ffb8-h4kt6\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.800499 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r8sb\" (UniqueName: \"kubernetes.io/projected/a72f9842-b59f-4a69-a6a9-e111389a99f1-kube-api-access-4r8sb\") pod \"controller-manager-65cc68ffb8-h4kt6\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.800612 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-proxy-ca-bundles\") pod \"controller-manager-65cc68ffb8-h4kt6\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.800676 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-config\") pod \"controller-manager-65cc68ffb8-h4kt6\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.902346 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r8sb\" (UniqueName: \"kubernetes.io/projected/a72f9842-b59f-4a69-a6a9-e111389a99f1-kube-api-access-4r8sb\") pod \"controller-manager-65cc68ffb8-h4kt6\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.902417 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-proxy-ca-bundles\") pod \"controller-manager-65cc68ffb8-h4kt6\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.902469 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-config\") pod \"controller-manager-65cc68ffb8-h4kt6\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.902511 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a72f9842-b59f-4a69-a6a9-e111389a99f1-serving-cert\") pod \"controller-manager-65cc68ffb8-h4kt6\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.902542 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-client-ca\") pod \"controller-manager-65cc68ffb8-h4kt6\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.903568 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-client-ca\") pod \"controller-manager-65cc68ffb8-h4kt6\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.903762 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-proxy-ca-bundles\") pod \"controller-manager-65cc68ffb8-h4kt6\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " 
pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.903786 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-config\") pod \"controller-manager-65cc68ffb8-h4kt6\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.916884 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a72f9842-b59f-4a69-a6a9-e111389a99f1-serving-cert\") pod \"controller-manager-65cc68ffb8-h4kt6\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:04 crc kubenswrapper[4745]: I1208 00:13:04.924551 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r8sb\" (UniqueName: \"kubernetes.io/projected/a72f9842-b59f-4a69-a6a9-e111389a99f1-kube-api-access-4r8sb\") pod \"controller-manager-65cc68ffb8-h4kt6\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:05 crc kubenswrapper[4745]: I1208 00:13:05.017042 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:05 crc kubenswrapper[4745]: I1208 00:13:05.412719 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6"] Dec 08 00:13:05 crc kubenswrapper[4745]: W1208 00:13:05.417753 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda72f9842_b59f_4a69_a6a9_e111389a99f1.slice/crio-edeb876056fd1451c30bb8baf82bdb7657543ad518b0009b149fd6a2460162cd WatchSource:0}: Error finding container edeb876056fd1451c30bb8baf82bdb7657543ad518b0009b149fd6a2460162cd: Status 404 returned error can't find the container with id edeb876056fd1451c30bb8baf82bdb7657543ad518b0009b149fd6a2460162cd Dec 08 00:13:05 crc kubenswrapper[4745]: I1208 00:13:05.427316 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" event={"ID":"a72f9842-b59f-4a69-a6a9-e111389a99f1","Type":"ContainerStarted","Data":"edeb876056fd1451c30bb8baf82bdb7657543ad518b0009b149fd6a2460162cd"} Dec 08 00:13:06 crc kubenswrapper[4745]: I1208 00:13:06.435736 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" event={"ID":"a72f9842-b59f-4a69-a6a9-e111389a99f1","Type":"ContainerStarted","Data":"4568604ef7d15c84113be44ec79c25f5a9b53ca362afb6ea586712298ea9a0f0"} Dec 08 00:13:06 crc kubenswrapper[4745]: I1208 00:13:06.457087 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" podStartSLOduration=6.457068576 podStartE2EDuration="6.457068576s" podCreationTimestamp="2025-12-08 00:13:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:13:06.454738057 +0000 UTC m=+341.883944377" watchObservedRunningTime="2025-12-08 00:13:06.457068576 +0000 UTC m=+341.886274896" Dec 08 00:13:07 crc 
kubenswrapper[4745]: I1208 00:13:07.440414 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:07 crc kubenswrapper[4745]: I1208 00:13:07.445030 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:20 crc kubenswrapper[4745]: I1208 00:13:20.273178 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6"] Dec 08 00:13:20 crc kubenswrapper[4745]: I1208 00:13:20.273998 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" podUID="a72f9842-b59f-4a69-a6a9-e111389a99f1" containerName="controller-manager" containerID="cri-o://4568604ef7d15c84113be44ec79c25f5a9b53ca362afb6ea586712298ea9a0f0" gracePeriod=30 Dec 08 00:13:20 crc kubenswrapper[4745]: I1208 00:13:20.286123 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2"] Dec 08 00:13:20 crc kubenswrapper[4745]: I1208 00:13:20.286415 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" podUID="bbeebe34-dfdb-49e9-9dd3-eb9291e77907" containerName="route-controller-manager" containerID="cri-o://a833f1f11450b46ca94eaeecf1a243285da91495cb02769bedcd1a431037b242" gracePeriod=30 Dec 08 00:13:21 crc kubenswrapper[4745]: I1208 00:13:21.506365 4745 generic.go:334] "Generic (PLEG): container finished" podID="a72f9842-b59f-4a69-a6a9-e111389a99f1" containerID="4568604ef7d15c84113be44ec79c25f5a9b53ca362afb6ea586712298ea9a0f0" exitCode=0 Dec 08 00:13:21 crc kubenswrapper[4745]: I1208 00:13:21.506511 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" event={"ID":"a72f9842-b59f-4a69-a6a9-e111389a99f1","Type":"ContainerDied","Data":"4568604ef7d15c84113be44ec79c25f5a9b53ca362afb6ea586712298ea9a0f0"} Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.100349 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.126534 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-794995848b-jmcwd"] Dec 08 00:13:22 crc kubenswrapper[4745]: E1208 00:13:22.126817 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a72f9842-b59f-4a69-a6a9-e111389a99f1" containerName="controller-manager" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.126870 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="a72f9842-b59f-4a69-a6a9-e111389a99f1" containerName="controller-manager" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.127022 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="a72f9842-b59f-4a69-a6a9-e111389a99f1" containerName="controller-manager" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.127379 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.138025 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-794995848b-jmcwd"] Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.215101 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a72f9842-b59f-4a69-a6a9-e111389a99f1-serving-cert\") pod \"a72f9842-b59f-4a69-a6a9-e111389a99f1\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.215492 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-client-ca\") pod \"a72f9842-b59f-4a69-a6a9-e111389a99f1\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.215738 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-proxy-ca-bundles\") pod \"a72f9842-b59f-4a69-a6a9-e111389a99f1\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.216254 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-client-ca" (OuterVolumeSpecName: "client-ca") pod "a72f9842-b59f-4a69-a6a9-e111389a99f1" (UID: "a72f9842-b59f-4a69-a6a9-e111389a99f1"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.216967 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "a72f9842-b59f-4a69-a6a9-e111389a99f1" (UID: "a72f9842-b59f-4a69-a6a9-e111389a99f1"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.217195 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4r8sb\" (UniqueName: \"kubernetes.io/projected/a72f9842-b59f-4a69-a6a9-e111389a99f1-kube-api-access-4r8sb\") pod \"a72f9842-b59f-4a69-a6a9-e111389a99f1\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.218169 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-config\") pod \"a72f9842-b59f-4a69-a6a9-e111389a99f1\" (UID: \"a72f9842-b59f-4a69-a6a9-e111389a99f1\") " Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.218664 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-config" (OuterVolumeSpecName: "config") pod "a72f9842-b59f-4a69-a6a9-e111389a99f1" (UID: "a72f9842-b59f-4a69-a6a9-e111389a99f1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.219082 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07183f7d-8e52-4e65-89c9-2847eaf2f73f-serving-cert\") pod \"controller-manager-794995848b-jmcwd\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.219343 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-proxy-ca-bundles\") pod \"controller-manager-794995848b-jmcwd\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.219547 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-client-ca\") pod \"controller-manager-794995848b-jmcwd\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.219793 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-config\") pod \"controller-manager-794995848b-jmcwd\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.220004 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wmg7\" (UniqueName: \"kubernetes.io/projected/07183f7d-8e52-4e65-89c9-2847eaf2f73f-kube-api-access-6wmg7\") pod \"controller-manager-794995848b-jmcwd\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.220218 4745 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.220377 4745 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.220509 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a72f9842-b59f-4a69-a6a9-e111389a99f1-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.221246 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a72f9842-b59f-4a69-a6a9-e111389a99f1-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a72f9842-b59f-4a69-a6a9-e111389a99f1" (UID: "a72f9842-b59f-4a69-a6a9-e111389a99f1"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.227386 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a72f9842-b59f-4a69-a6a9-e111389a99f1-kube-api-access-4r8sb" (OuterVolumeSpecName: "kube-api-access-4r8sb") pod "a72f9842-b59f-4a69-a6a9-e111389a99f1" (UID: "a72f9842-b59f-4a69-a6a9-e111389a99f1"). InnerVolumeSpecName "kube-api-access-4r8sb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.328921 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-client-ca\") pod \"controller-manager-794995848b-jmcwd\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.330081 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-config\") pod \"controller-manager-794995848b-jmcwd\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.331050 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wmg7\" (UniqueName: \"kubernetes.io/projected/07183f7d-8e52-4e65-89c9-2847eaf2f73f-kube-api-access-6wmg7\") pod \"controller-manager-794995848b-jmcwd\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.331405 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07183f7d-8e52-4e65-89c9-2847eaf2f73f-serving-cert\") pod \"controller-manager-794995848b-jmcwd\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.331798 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-proxy-ca-bundles\") pod \"controller-manager-794995848b-jmcwd\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.331900 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4r8sb\" (UniqueName: \"kubernetes.io/projected/a72f9842-b59f-4a69-a6a9-e111389a99f1-kube-api-access-4r8sb\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.331992 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a72f9842-b59f-4a69-a6a9-e111389a99f1-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.331014 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-config\") pod \"controller-manager-794995848b-jmcwd\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" 
Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.330009 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-client-ca\") pod \"controller-manager-794995848b-jmcwd\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.332858 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-proxy-ca-bundles\") pod \"controller-manager-794995848b-jmcwd\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.334393 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07183f7d-8e52-4e65-89c9-2847eaf2f73f-serving-cert\") pod \"controller-manager-794995848b-jmcwd\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.350919 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wmg7\" (UniqueName: \"kubernetes.io/projected/07183f7d-8e52-4e65-89c9-2847eaf2f73f-kube-api-access-6wmg7\") pod \"controller-manager-794995848b-jmcwd\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.452161 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.460911 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.460974 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.504743 4745 patch_prober.go:28] interesting pod/route-controller-manager-6c9688944-6nbp2 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.62:8443/healthz\": dial tcp 10.217.0.62:8443: connect: connection refused" start-of-body= Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.505051 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" podUID="bbeebe34-dfdb-49e9-9dd3-eb9291e77907" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.62:8443/healthz\": dial tcp 10.217.0.62:8443: connect: connection refused" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.514550 4745 generic.go:334] "Generic (PLEG): container finished" 
podID="bbeebe34-dfdb-49e9-9dd3-eb9291e77907" containerID="a833f1f11450b46ca94eaeecf1a243285da91495cb02769bedcd1a431037b242" exitCode=0 Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.514892 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" event={"ID":"bbeebe34-dfdb-49e9-9dd3-eb9291e77907","Type":"ContainerDied","Data":"a833f1f11450b46ca94eaeecf1a243285da91495cb02769bedcd1a431037b242"} Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.517074 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" event={"ID":"a72f9842-b59f-4a69-a6a9-e111389a99f1","Type":"ContainerDied","Data":"edeb876056fd1451c30bb8baf82bdb7657543ad518b0009b149fd6a2460162cd"} Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.517210 4745 scope.go:117] "RemoveContainer" containerID="4568604ef7d15c84113be44ec79c25f5a9b53ca362afb6ea586712298ea9a0f0" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.517237 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6" Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.556686 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6"] Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.559628 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-65cc68ffb8-h4kt6"] Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.871941 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-794995848b-jmcwd"] Dec 08 00:13:22 crc kubenswrapper[4745]: I1208 00:13:22.918074 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a72f9842-b59f-4a69-a6a9-e111389a99f1" path="/var/lib/kubelet/pods/a72f9842-b59f-4a69-a6a9-e111389a99f1/volumes" Dec 08 00:13:23 crc kubenswrapper[4745]: I1208 00:13:23.525283 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" event={"ID":"07183f7d-8e52-4e65-89c9-2847eaf2f73f","Type":"ContainerStarted","Data":"93ce8c34034ef7e53ac15a34211e3ded12161a88b34ee1ab6552c5ae467ab796"} Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.110733 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.143286 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp"] Dec 08 00:13:24 crc kubenswrapper[4745]: E1208 00:13:24.143652 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbeebe34-dfdb-49e9-9dd3-eb9291e77907" containerName="route-controller-manager" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.143678 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbeebe34-dfdb-49e9-9dd3-eb9291e77907" containerName="route-controller-manager" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.143853 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbeebe34-dfdb-49e9-9dd3-eb9291e77907" containerName="route-controller-manager" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.144615 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.153103 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp"] Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.261185 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-serving-cert\") pod \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\" (UID: \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\") " Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.261354 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-config\") pod \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\" (UID: \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\") " Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.261415 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-client-ca\") pod \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\" (UID: \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\") " Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.261463 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5v584\" (UniqueName: \"kubernetes.io/projected/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-kube-api-access-5v584\") pod \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\" (UID: \"bbeebe34-dfdb-49e9-9dd3-eb9291e77907\") " Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.261875 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7bc42007-dbc8-49f7-b955-f7a7fee4469d-client-ca\") pod \"route-controller-manager-6c9688944-nwglp\" (UID: \"7bc42007-dbc8-49f7-b955-f7a7fee4469d\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.262021 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bc42007-dbc8-49f7-b955-f7a7fee4469d-serving-cert\") pod \"route-controller-manager-6c9688944-nwglp\" (UID: \"7bc42007-dbc8-49f7-b955-f7a7fee4469d\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.262113 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bc42007-dbc8-49f7-b955-f7a7fee4469d-config\") pod \"route-controller-manager-6c9688944-nwglp\" (UID: \"7bc42007-dbc8-49f7-b955-f7a7fee4469d\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.262177 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-config" (OuterVolumeSpecName: "config") pod "bbeebe34-dfdb-49e9-9dd3-eb9291e77907" (UID: "bbeebe34-dfdb-49e9-9dd3-eb9291e77907"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.262207 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rw76q\" (UniqueName: \"kubernetes.io/projected/7bc42007-dbc8-49f7-b955-f7a7fee4469d-kube-api-access-rw76q\") pod \"route-controller-manager-6c9688944-nwglp\" (UID: \"7bc42007-dbc8-49f7-b955-f7a7fee4469d\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.262321 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.262425 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-client-ca" (OuterVolumeSpecName: "client-ca") pod "bbeebe34-dfdb-49e9-9dd3-eb9291e77907" (UID: "bbeebe34-dfdb-49e9-9dd3-eb9291e77907"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.270115 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bbeebe34-dfdb-49e9-9dd3-eb9291e77907" (UID: "bbeebe34-dfdb-49e9-9dd3-eb9291e77907"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.270291 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-kube-api-access-5v584" (OuterVolumeSpecName: "kube-api-access-5v584") pod "bbeebe34-dfdb-49e9-9dd3-eb9291e77907" (UID: "bbeebe34-dfdb-49e9-9dd3-eb9291e77907"). InnerVolumeSpecName "kube-api-access-5v584". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.363373 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bc42007-dbc8-49f7-b955-f7a7fee4469d-config\") pod \"route-controller-manager-6c9688944-nwglp\" (UID: \"7bc42007-dbc8-49f7-b955-f7a7fee4469d\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.363449 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rw76q\" (UniqueName: \"kubernetes.io/projected/7bc42007-dbc8-49f7-b955-f7a7fee4469d-kube-api-access-rw76q\") pod \"route-controller-manager-6c9688944-nwglp\" (UID: \"7bc42007-dbc8-49f7-b955-f7a7fee4469d\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.363520 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7bc42007-dbc8-49f7-b955-f7a7fee4469d-client-ca\") pod \"route-controller-manager-6c9688944-nwglp\" (UID: \"7bc42007-dbc8-49f7-b955-f7a7fee4469d\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.363554 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bc42007-dbc8-49f7-b955-f7a7fee4469d-serving-cert\") pod \"route-controller-manager-6c9688944-nwglp\" (UID: \"7bc42007-dbc8-49f7-b955-f7a7fee4469d\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.363606 4745 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.363625 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5v584\" (UniqueName: \"kubernetes.io/projected/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-kube-api-access-5v584\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.363639 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bbeebe34-dfdb-49e9-9dd3-eb9291e77907-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.365315 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7bc42007-dbc8-49f7-b955-f7a7fee4469d-client-ca\") pod \"route-controller-manager-6c9688944-nwglp\" (UID: \"7bc42007-dbc8-49f7-b955-f7a7fee4469d\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.365906 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bc42007-dbc8-49f7-b955-f7a7fee4469d-config\") pod \"route-controller-manager-6c9688944-nwglp\" (UID: \"7bc42007-dbc8-49f7-b955-f7a7fee4469d\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.369365 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bc42007-dbc8-49f7-b955-f7a7fee4469d-serving-cert\") pod \"route-controller-manager-6c9688944-nwglp\" (UID: \"7bc42007-dbc8-49f7-b955-f7a7fee4469d\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.387546 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rw76q\" (UniqueName: \"kubernetes.io/projected/7bc42007-dbc8-49f7-b955-f7a7fee4469d-kube-api-access-rw76q\") pod \"route-controller-manager-6c9688944-nwglp\" (UID: \"7bc42007-dbc8-49f7-b955-f7a7fee4469d\") " pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.466450 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.542509 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" event={"ID":"bbeebe34-dfdb-49e9-9dd3-eb9291e77907","Type":"ContainerDied","Data":"e6a10923c44e1497b3bd35e05469a8e6d8cabdd9ae1c25906125456febe8429b"} Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.542584 4745 scope.go:117] "RemoveContainer" containerID="a833f1f11450b46ca94eaeecf1a243285da91495cb02769bedcd1a431037b242" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.542753 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.628773 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2"] Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.638470 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c9688944-6nbp2"] Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.892485 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbeebe34-dfdb-49e9-9dd3-eb9291e77907" path="/var/lib/kubelet/pods/bbeebe34-dfdb-49e9-9dd3-eb9291e77907/volumes" Dec 08 00:13:24 crc kubenswrapper[4745]: I1208 00:13:24.966023 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp"] Dec 08 00:13:24 crc kubenswrapper[4745]: W1208 00:13:24.974546 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7bc42007_dbc8_49f7_b955_f7a7fee4469d.slice/crio-a9421c9e8d71b39fdc353d904de9f25bf42a0b77b058cb61e9052b644c0b2fa2 WatchSource:0}: Error finding container a9421c9e8d71b39fdc353d904de9f25bf42a0b77b058cb61e9052b644c0b2fa2: Status 404 returned error can't find the container with id a9421c9e8d71b39fdc353d904de9f25bf42a0b77b058cb61e9052b644c0b2fa2 Dec 08 00:13:25 crc kubenswrapper[4745]: I1208 00:13:25.549208 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" event={"ID":"07183f7d-8e52-4e65-89c9-2847eaf2f73f","Type":"ContainerStarted","Data":"25e9312beb78a3c242ebaafcd487e8edcf9d84dfc5c2cb18a48740969b367f04"} Dec 08 00:13:25 crc kubenswrapper[4745]: I1208 00:13:25.549685 4745 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:25 crc kubenswrapper[4745]: I1208 00:13:25.551778 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" event={"ID":"7bc42007-dbc8-49f7-b955-f7a7fee4469d","Type":"ContainerStarted","Data":"14b400e03f5af7f49e3e536071631e7355be58983e3a42c28132318266d553de"} Dec 08 00:13:25 crc kubenswrapper[4745]: I1208 00:13:25.551814 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" event={"ID":"7bc42007-dbc8-49f7-b955-f7a7fee4469d","Type":"ContainerStarted","Data":"a9421c9e8d71b39fdc353d904de9f25bf42a0b77b058cb61e9052b644c0b2fa2"} Dec 08 00:13:25 crc kubenswrapper[4745]: I1208 00:13:25.552496 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" Dec 08 00:13:25 crc kubenswrapper[4745]: I1208 00:13:25.559284 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:25 crc kubenswrapper[4745]: I1208 00:13:25.567233 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" podStartSLOduration=5.567212739 podStartE2EDuration="5.567212739s" podCreationTimestamp="2025-12-08 00:13:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:13:25.56589691 +0000 UTC m=+360.995103220" watchObservedRunningTime="2025-12-08 00:13:25.567212739 +0000 UTC m=+360.996419039" Dec 08 00:13:25 crc kubenswrapper[4745]: I1208 00:13:25.604297 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" podStartSLOduration=3.6042827170000002 podStartE2EDuration="3.604282717s" podCreationTimestamp="2025-12-08 00:13:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:13:25.584772349 +0000 UTC m=+361.013978649" watchObservedRunningTime="2025-12-08 00:13:25.604282717 +0000 UTC m=+361.033489017" Dec 08 00:13:26 crc kubenswrapper[4745]: I1208 00:13:26.072807 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6c9688944-nwglp" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.078666 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-r57vv"] Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.080520 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.094138 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-r57vv"] Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.186506 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnpfx\" (UniqueName: \"kubernetes.io/projected/30edd697-e73f-40ec-91a6-3d0241946392-kube-api-access-fnpfx\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.186568 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/30edd697-e73f-40ec-91a6-3d0241946392-bound-sa-token\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.186601 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/30edd697-e73f-40ec-91a6-3d0241946392-registry-tls\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.186623 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/30edd697-e73f-40ec-91a6-3d0241946392-installation-pull-secrets\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.186715 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.186750 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/30edd697-e73f-40ec-91a6-3d0241946392-registry-certificates\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.186766 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/30edd697-e73f-40ec-91a6-3d0241946392-trusted-ca\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.186789 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/30edd697-e73f-40ec-91a6-3d0241946392-ca-trust-extracted\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.229439 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.288128 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnpfx\" (UniqueName: \"kubernetes.io/projected/30edd697-e73f-40ec-91a6-3d0241946392-kube-api-access-fnpfx\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.288204 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/30edd697-e73f-40ec-91a6-3d0241946392-bound-sa-token\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.288241 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/30edd697-e73f-40ec-91a6-3d0241946392-registry-tls\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.288275 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/30edd697-e73f-40ec-91a6-3d0241946392-installation-pull-secrets\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.288330 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/30edd697-e73f-40ec-91a6-3d0241946392-trusted-ca\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.288345 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/30edd697-e73f-40ec-91a6-3d0241946392-registry-certificates\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.288364 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/30edd697-e73f-40ec-91a6-3d0241946392-ca-trust-extracted\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.288763 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/30edd697-e73f-40ec-91a6-3d0241946392-ca-trust-extracted\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.291369 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/30edd697-e73f-40ec-91a6-3d0241946392-trusted-ca\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.291383 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/30edd697-e73f-40ec-91a6-3d0241946392-registry-certificates\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.297793 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/30edd697-e73f-40ec-91a6-3d0241946392-installation-pull-secrets\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.303447 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/30edd697-e73f-40ec-91a6-3d0241946392-registry-tls\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.306705 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/30edd697-e73f-40ec-91a6-3d0241946392-bound-sa-token\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.306731 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnpfx\" (UniqueName: \"kubernetes.io/projected/30edd697-e73f-40ec-91a6-3d0241946392-kube-api-access-fnpfx\") pod \"image-registry-66df7c8f76-r57vv\" (UID: \"30edd697-e73f-40ec-91a6-3d0241946392\") " pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.406048 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:31 crc kubenswrapper[4745]: I1208 00:13:31.881058 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-r57vv"] Dec 08 00:13:32 crc kubenswrapper[4745]: I1208 00:13:32.593276 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" event={"ID":"30edd697-e73f-40ec-91a6-3d0241946392","Type":"ContainerStarted","Data":"8620386e44c371bfb91e020c38eed78e69487a1f68040acbad130a1c9f562a3a"} Dec 08 00:13:32 crc kubenswrapper[4745]: I1208 00:13:32.593706 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" event={"ID":"30edd697-e73f-40ec-91a6-3d0241946392","Type":"ContainerStarted","Data":"e4fb2e9c144e4d1f426c6e3dd808d16401de54cc532166fd6f056932b2adab00"} Dec 08 00:13:32 crc kubenswrapper[4745]: I1208 00:13:32.593748 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:32 crc kubenswrapper[4745]: I1208 00:13:32.619787 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" podStartSLOduration=1.61975913 podStartE2EDuration="1.61975913s" podCreationTimestamp="2025-12-08 00:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:13:32.612576127 +0000 UTC m=+368.041782447" watchObservedRunningTime="2025-12-08 00:13:32.61975913 +0000 UTC m=+368.048965450" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.176687 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4x96l"] Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.178578 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4x96l" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.180532 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.194631 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4x96l"] Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.269734 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcrh5\" (UniqueName: \"kubernetes.io/projected/0d07f882-dd50-42e8-8619-1acf9a483dc2-kube-api-access-qcrh5\") pod \"redhat-operators-4x96l\" (UID: \"0d07f882-dd50-42e8-8619-1acf9a483dc2\") " pod="openshift-marketplace/redhat-operators-4x96l" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.269861 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d07f882-dd50-42e8-8619-1acf9a483dc2-catalog-content\") pod \"redhat-operators-4x96l\" (UID: \"0d07f882-dd50-42e8-8619-1acf9a483dc2\") " pod="openshift-marketplace/redhat-operators-4x96l" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.270072 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d07f882-dd50-42e8-8619-1acf9a483dc2-utilities\") pod \"redhat-operators-4x96l\" (UID: \"0d07f882-dd50-42e8-8619-1acf9a483dc2\") " pod="openshift-marketplace/redhat-operators-4x96l" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.371544 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d07f882-dd50-42e8-8619-1acf9a483dc2-utilities\") pod \"redhat-operators-4x96l\" (UID: \"0d07f882-dd50-42e8-8619-1acf9a483dc2\") " pod="openshift-marketplace/redhat-operators-4x96l" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.371686 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcrh5\" (UniqueName: \"kubernetes.io/projected/0d07f882-dd50-42e8-8619-1acf9a483dc2-kube-api-access-qcrh5\") pod \"redhat-operators-4x96l\" (UID: \"0d07f882-dd50-42e8-8619-1acf9a483dc2\") " pod="openshift-marketplace/redhat-operators-4x96l" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.371744 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d07f882-dd50-42e8-8619-1acf9a483dc2-catalog-content\") pod \"redhat-operators-4x96l\" (UID: \"0d07f882-dd50-42e8-8619-1acf9a483dc2\") " pod="openshift-marketplace/redhat-operators-4x96l" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.372145 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d07f882-dd50-42e8-8619-1acf9a483dc2-utilities\") pod \"redhat-operators-4x96l\" (UID: \"0d07f882-dd50-42e8-8619-1acf9a483dc2\") " pod="openshift-marketplace/redhat-operators-4x96l" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.372353 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d07f882-dd50-42e8-8619-1acf9a483dc2-catalog-content\") pod \"redhat-operators-4x96l\" (UID: \"0d07f882-dd50-42e8-8619-1acf9a483dc2\") " 
pod="openshift-marketplace/redhat-operators-4x96l" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.375324 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-z697b"] Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.376557 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z697b" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.379338 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.389640 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z697b"] Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.409839 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcrh5\" (UniqueName: \"kubernetes.io/projected/0d07f882-dd50-42e8-8619-1acf9a483dc2-kube-api-access-qcrh5\") pod \"redhat-operators-4x96l\" (UID: \"0d07f882-dd50-42e8-8619-1acf9a483dc2\") " pod="openshift-marketplace/redhat-operators-4x96l" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.473640 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sf7dp\" (UniqueName: \"kubernetes.io/projected/448a7524-1168-4f41-a50c-48e06c2440d5-kube-api-access-sf7dp\") pod \"certified-operators-z697b\" (UID: \"448a7524-1168-4f41-a50c-48e06c2440d5\") " pod="openshift-marketplace/certified-operators-z697b" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.473959 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/448a7524-1168-4f41-a50c-48e06c2440d5-catalog-content\") pod \"certified-operators-z697b\" (UID: \"448a7524-1168-4f41-a50c-48e06c2440d5\") " pod="openshift-marketplace/certified-operators-z697b" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.474021 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/448a7524-1168-4f41-a50c-48e06c2440d5-utilities\") pod \"certified-operators-z697b\" (UID: \"448a7524-1168-4f41-a50c-48e06c2440d5\") " pod="openshift-marketplace/certified-operators-z697b" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.505596 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4x96l" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.575110 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/448a7524-1168-4f41-a50c-48e06c2440d5-utilities\") pod \"certified-operators-z697b\" (UID: \"448a7524-1168-4f41-a50c-48e06c2440d5\") " pod="openshift-marketplace/certified-operators-z697b" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.575254 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sf7dp\" (UniqueName: \"kubernetes.io/projected/448a7524-1168-4f41-a50c-48e06c2440d5-kube-api-access-sf7dp\") pod \"certified-operators-z697b\" (UID: \"448a7524-1168-4f41-a50c-48e06c2440d5\") " pod="openshift-marketplace/certified-operators-z697b" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.575346 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/448a7524-1168-4f41-a50c-48e06c2440d5-catalog-content\") pod \"certified-operators-z697b\" (UID: \"448a7524-1168-4f41-a50c-48e06c2440d5\") " pod="openshift-marketplace/certified-operators-z697b" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.575651 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/448a7524-1168-4f41-a50c-48e06c2440d5-utilities\") pod \"certified-operators-z697b\" (UID: \"448a7524-1168-4f41-a50c-48e06c2440d5\") " pod="openshift-marketplace/certified-operators-z697b" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.576132 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/448a7524-1168-4f41-a50c-48e06c2440d5-catalog-content\") pod \"certified-operators-z697b\" (UID: \"448a7524-1168-4f41-a50c-48e06c2440d5\") " pod="openshift-marketplace/certified-operators-z697b" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.597703 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sf7dp\" (UniqueName: \"kubernetes.io/projected/448a7524-1168-4f41-a50c-48e06c2440d5-kube-api-access-sf7dp\") pod \"certified-operators-z697b\" (UID: \"448a7524-1168-4f41-a50c-48e06c2440d5\") " pod="openshift-marketplace/certified-operators-z697b" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.711284 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z697b" Dec 08 00:13:36 crc kubenswrapper[4745]: I1208 00:13:36.911565 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4x96l"] Dec 08 00:13:36 crc kubenswrapper[4745]: W1208 00:13:36.912983 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d07f882_dd50_42e8_8619_1acf9a483dc2.slice/crio-7da244f2b758fd1bfadf48b69abef49aaf5adb0f4f89cea78c88ee5f0146988d WatchSource:0}: Error finding container 7da244f2b758fd1bfadf48b69abef49aaf5adb0f4f89cea78c88ee5f0146988d: Status 404 returned error can't find the container with id 7da244f2b758fd1bfadf48b69abef49aaf5adb0f4f89cea78c88ee5f0146988d Dec 08 00:13:37 crc kubenswrapper[4745]: I1208 00:13:37.139798 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z697b"] Dec 08 00:13:37 crc kubenswrapper[4745]: I1208 00:13:37.635347 4745 generic.go:334] "Generic (PLEG): container finished" podID="0d07f882-dd50-42e8-8619-1acf9a483dc2" containerID="7d86f7d613a5419eee17a215c895f4abc3a25aa6576afc236364f218a734cecd" exitCode=0 Dec 08 00:13:37 crc kubenswrapper[4745]: I1208 00:13:37.635436 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4x96l" event={"ID":"0d07f882-dd50-42e8-8619-1acf9a483dc2","Type":"ContainerDied","Data":"7d86f7d613a5419eee17a215c895f4abc3a25aa6576afc236364f218a734cecd"} Dec 08 00:13:37 crc kubenswrapper[4745]: I1208 00:13:37.635535 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4x96l" event={"ID":"0d07f882-dd50-42e8-8619-1acf9a483dc2","Type":"ContainerStarted","Data":"7da244f2b758fd1bfadf48b69abef49aaf5adb0f4f89cea78c88ee5f0146988d"} Dec 08 00:13:37 crc kubenswrapper[4745]: I1208 00:13:37.638804 4745 generic.go:334] "Generic (PLEG): container finished" podID="448a7524-1168-4f41-a50c-48e06c2440d5" containerID="4e407cddd39f4c137a2c035108dffcd43d9b517944bb0a075525af993c6e5c17" exitCode=0 Dec 08 00:13:37 crc kubenswrapper[4745]: I1208 00:13:37.638848 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z697b" event={"ID":"448a7524-1168-4f41-a50c-48e06c2440d5","Type":"ContainerDied","Data":"4e407cddd39f4c137a2c035108dffcd43d9b517944bb0a075525af993c6e5c17"} Dec 08 00:13:37 crc kubenswrapper[4745]: I1208 00:13:37.638884 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z697b" event={"ID":"448a7524-1168-4f41-a50c-48e06c2440d5","Type":"ContainerStarted","Data":"0113b1b0741a1425fcf70f1630e2977c1543b3c438f5304efb2a95530d8c718d"} Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.576953 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xlt2t"] Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.578442 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xlt2t" Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.581531 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.586696 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xlt2t"] Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.645969 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z697b" event={"ID":"448a7524-1168-4f41-a50c-48e06c2440d5","Type":"ContainerStarted","Data":"789639b36b81e4b5d9879f42ee7d46e91822c7da7c95f7ca6879021fde6da36e"} Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.647584 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4x96l" event={"ID":"0d07f882-dd50-42e8-8619-1acf9a483dc2","Type":"ContainerStarted","Data":"75ecab76127c3906dc35a25110641c5bc07de3b95c514c2418d7e2aa0b4241d7"} Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.702822 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6-catalog-content\") pod \"community-operators-xlt2t\" (UID: \"e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6\") " pod="openshift-marketplace/community-operators-xlt2t" Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.702995 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmh8l\" (UniqueName: \"kubernetes.io/projected/e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6-kube-api-access-xmh8l\") pod \"community-operators-xlt2t\" (UID: \"e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6\") " pod="openshift-marketplace/community-operators-xlt2t" Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.703028 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6-utilities\") pod \"community-operators-xlt2t\" (UID: \"e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6\") " pod="openshift-marketplace/community-operators-xlt2t" Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.774396 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-64bk6"] Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.775328 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.778615 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.789827 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-64bk6"] Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.804589 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmh8l\" (UniqueName: \"kubernetes.io/projected/e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6-kube-api-access-xmh8l\") pod \"community-operators-xlt2t\" (UID: \"e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6\") " pod="openshift-marketplace/community-operators-xlt2t" Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.804665 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6-utilities\") pod \"community-operators-xlt2t\" (UID: \"e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6\") " pod="openshift-marketplace/community-operators-xlt2t" Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.804730 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6-catalog-content\") pod \"community-operators-xlt2t\" (UID: \"e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6\") " pod="openshift-marketplace/community-operators-xlt2t" Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.805494 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6-catalog-content\") pod \"community-operators-xlt2t\" (UID: \"e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6\") " pod="openshift-marketplace/community-operators-xlt2t" Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.806140 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6-utilities\") pod \"community-operators-xlt2t\" (UID: \"e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6\") " pod="openshift-marketplace/community-operators-xlt2t" Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.827980 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmh8l\" (UniqueName: \"kubernetes.io/projected/e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6-kube-api-access-xmh8l\") pod \"community-operators-xlt2t\" (UID: \"e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6\") " pod="openshift-marketplace/community-operators-xlt2t" Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.894169 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xlt2t" Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.905521 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09f4c527-282b-4f8e-98ff-3e3032ab2896-catalog-content\") pod \"redhat-marketplace-64bk6\" (UID: \"09f4c527-282b-4f8e-98ff-3e3032ab2896\") " pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.905630 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2d9m\" (UniqueName: \"kubernetes.io/projected/09f4c527-282b-4f8e-98ff-3e3032ab2896-kube-api-access-f2d9m\") pod \"redhat-marketplace-64bk6\" (UID: \"09f4c527-282b-4f8e-98ff-3e3032ab2896\") " pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:13:38 crc kubenswrapper[4745]: I1208 00:13:38.905711 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09f4c527-282b-4f8e-98ff-3e3032ab2896-utilities\") pod \"redhat-marketplace-64bk6\" (UID: \"09f4c527-282b-4f8e-98ff-3e3032ab2896\") " pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.006812 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2d9m\" (UniqueName: \"kubernetes.io/projected/09f4c527-282b-4f8e-98ff-3e3032ab2896-kube-api-access-f2d9m\") pod \"redhat-marketplace-64bk6\" (UID: \"09f4c527-282b-4f8e-98ff-3e3032ab2896\") " pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.007299 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09f4c527-282b-4f8e-98ff-3e3032ab2896-utilities\") pod \"redhat-marketplace-64bk6\" (UID: \"09f4c527-282b-4f8e-98ff-3e3032ab2896\") " pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.007334 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09f4c527-282b-4f8e-98ff-3e3032ab2896-catalog-content\") pod \"redhat-marketplace-64bk6\" (UID: \"09f4c527-282b-4f8e-98ff-3e3032ab2896\") " pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.008013 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09f4c527-282b-4f8e-98ff-3e3032ab2896-catalog-content\") pod \"redhat-marketplace-64bk6\" (UID: \"09f4c527-282b-4f8e-98ff-3e3032ab2896\") " pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.010175 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09f4c527-282b-4f8e-98ff-3e3032ab2896-utilities\") pod \"redhat-marketplace-64bk6\" (UID: \"09f4c527-282b-4f8e-98ff-3e3032ab2896\") " pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.035420 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2d9m\" (UniqueName: \"kubernetes.io/projected/09f4c527-282b-4f8e-98ff-3e3032ab2896-kube-api-access-f2d9m\") pod 
\"redhat-marketplace-64bk6\" (UID: \"09f4c527-282b-4f8e-98ff-3e3032ab2896\") " pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.091115 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.296412 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xlt2t"] Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.457914 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-64bk6"] Dec 08 00:13:39 crc kubenswrapper[4745]: W1208 00:13:39.507921 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09f4c527_282b_4f8e_98ff_3e3032ab2896.slice/crio-735ebe81e043a9be62c5850d84c9fcf5c73bad89714fd3973dd17f41104c5d0e WatchSource:0}: Error finding container 735ebe81e043a9be62c5850d84c9fcf5c73bad89714fd3973dd17f41104c5d0e: Status 404 returned error can't find the container with id 735ebe81e043a9be62c5850d84c9fcf5c73bad89714fd3973dd17f41104c5d0e Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.663897 4745 generic.go:334] "Generic (PLEG): container finished" podID="e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6" containerID="6a4a843ea3c761dcfdd3ab3886d526599d44694314c1d1c9054b49f3e0bf1a0c" exitCode=0 Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.663969 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xlt2t" event={"ID":"e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6","Type":"ContainerDied","Data":"6a4a843ea3c761dcfdd3ab3886d526599d44694314c1d1c9054b49f3e0bf1a0c"} Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.664027 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xlt2t" event={"ID":"e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6","Type":"ContainerStarted","Data":"350ca305eb87c0c76a8cf4cd1521582ed53973cde415e4e1b7cff57f85440cdf"} Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.665549 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-64bk6" event={"ID":"09f4c527-282b-4f8e-98ff-3e3032ab2896","Type":"ContainerStarted","Data":"496e81b8072ff225b0704ea7f0de54d1d32bb16589e7c909e70d918734f2fc61"} Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.665588 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-64bk6" event={"ID":"09f4c527-282b-4f8e-98ff-3e3032ab2896","Type":"ContainerStarted","Data":"735ebe81e043a9be62c5850d84c9fcf5c73bad89714fd3973dd17f41104c5d0e"} Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.668774 4745 generic.go:334] "Generic (PLEG): container finished" podID="0d07f882-dd50-42e8-8619-1acf9a483dc2" containerID="75ecab76127c3906dc35a25110641c5bc07de3b95c514c2418d7e2aa0b4241d7" exitCode=0 Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.668817 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4x96l" event={"ID":"0d07f882-dd50-42e8-8619-1acf9a483dc2","Type":"ContainerDied","Data":"75ecab76127c3906dc35a25110641c5bc07de3b95c514c2418d7e2aa0b4241d7"} Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.670340 4745 generic.go:334] "Generic (PLEG): container finished" podID="448a7524-1168-4f41-a50c-48e06c2440d5" 
containerID="789639b36b81e4b5d9879f42ee7d46e91822c7da7c95f7ca6879021fde6da36e" exitCode=0 Dec 08 00:13:39 crc kubenswrapper[4745]: I1208 00:13:39.670378 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z697b" event={"ID":"448a7524-1168-4f41-a50c-48e06c2440d5","Type":"ContainerDied","Data":"789639b36b81e4b5d9879f42ee7d46e91822c7da7c95f7ca6879021fde6da36e"} Dec 08 00:13:40 crc kubenswrapper[4745]: I1208 00:13:40.591335 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-794995848b-jmcwd"] Dec 08 00:13:40 crc kubenswrapper[4745]: I1208 00:13:40.591600 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" podUID="07183f7d-8e52-4e65-89c9-2847eaf2f73f" containerName="controller-manager" containerID="cri-o://25e9312beb78a3c242ebaafcd487e8edcf9d84dfc5c2cb18a48740969b367f04" gracePeriod=30 Dec 08 00:13:40 crc kubenswrapper[4745]: I1208 00:13:40.675786 4745 generic.go:334] "Generic (PLEG): container finished" podID="e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6" containerID="536aff9c5809156af0e0a84c97192ee8984e2c95c9b184e282fdbca02ab563df" exitCode=0 Dec 08 00:13:40 crc kubenswrapper[4745]: I1208 00:13:40.675849 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xlt2t" event={"ID":"e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6","Type":"ContainerDied","Data":"536aff9c5809156af0e0a84c97192ee8984e2c95c9b184e282fdbca02ab563df"} Dec 08 00:13:40 crc kubenswrapper[4745]: I1208 00:13:40.677195 4745 generic.go:334] "Generic (PLEG): container finished" podID="09f4c527-282b-4f8e-98ff-3e3032ab2896" containerID="496e81b8072ff225b0704ea7f0de54d1d32bb16589e7c909e70d918734f2fc61" exitCode=0 Dec 08 00:13:40 crc kubenswrapper[4745]: I1208 00:13:40.677256 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-64bk6" event={"ID":"09f4c527-282b-4f8e-98ff-3e3032ab2896","Type":"ContainerDied","Data":"496e81b8072ff225b0704ea7f0de54d1d32bb16589e7c909e70d918734f2fc61"} Dec 08 00:13:40 crc kubenswrapper[4745]: I1208 00:13:40.679460 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4x96l" event={"ID":"0d07f882-dd50-42e8-8619-1acf9a483dc2","Type":"ContainerStarted","Data":"628fc67f1b78f05b02b116ddc51ed6bde43ec0f2b9c205eef845fecc2f812408"} Dec 08 00:13:40 crc kubenswrapper[4745]: I1208 00:13:40.681966 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z697b" event={"ID":"448a7524-1168-4f41-a50c-48e06c2440d5","Type":"ContainerStarted","Data":"69762a207ff51abbd89339fc8d5f4dc2d4186008e0768074292bdd5a3b571428"} Dec 08 00:13:40 crc kubenswrapper[4745]: I1208 00:13:40.715865 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4x96l" podStartSLOduration=2.2947754 podStartE2EDuration="4.715846571s" podCreationTimestamp="2025-12-08 00:13:36 +0000 UTC" firstStartedPulling="2025-12-08 00:13:37.638548466 +0000 UTC m=+373.067754766" lastFinishedPulling="2025-12-08 00:13:40.059619637 +0000 UTC m=+375.488825937" observedRunningTime="2025-12-08 00:13:40.711041269 +0000 UTC m=+376.140247579" watchObservedRunningTime="2025-12-08 00:13:40.715846571 +0000 UTC m=+376.145052871" Dec 08 00:13:40 crc kubenswrapper[4745]: I1208 00:13:40.733760 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/certified-operators-z697b" podStartSLOduration=2.2053972809999998 podStartE2EDuration="4.733740881s" podCreationTimestamp="2025-12-08 00:13:36 +0000 UTC" firstStartedPulling="2025-12-08 00:13:37.642273456 +0000 UTC m=+373.071479766" lastFinishedPulling="2025-12-08 00:13:40.170617066 +0000 UTC m=+375.599823366" observedRunningTime="2025-12-08 00:13:40.729153365 +0000 UTC m=+376.158359675" watchObservedRunningTime="2025-12-08 00:13:40.733740881 +0000 UTC m=+376.162947191" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.102563 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.236351 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07183f7d-8e52-4e65-89c9-2847eaf2f73f-serving-cert\") pod \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.236450 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-proxy-ca-bundles\") pod \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.236491 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-config\") pod \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.236570 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-client-ca\") pod \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.236592 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wmg7\" (UniqueName: \"kubernetes.io/projected/07183f7d-8e52-4e65-89c9-2847eaf2f73f-kube-api-access-6wmg7\") pod \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\" (UID: \"07183f7d-8e52-4e65-89c9-2847eaf2f73f\") " Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.238043 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "07183f7d-8e52-4e65-89c9-2847eaf2f73f" (UID: "07183f7d-8e52-4e65-89c9-2847eaf2f73f"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.238076 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-client-ca" (OuterVolumeSpecName: "client-ca") pod "07183f7d-8e52-4e65-89c9-2847eaf2f73f" (UID: "07183f7d-8e52-4e65-89c9-2847eaf2f73f"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.238115 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-config" (OuterVolumeSpecName: "config") pod "07183f7d-8e52-4e65-89c9-2847eaf2f73f" (UID: "07183f7d-8e52-4e65-89c9-2847eaf2f73f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.241457 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07183f7d-8e52-4e65-89c9-2847eaf2f73f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "07183f7d-8e52-4e65-89c9-2847eaf2f73f" (UID: "07183f7d-8e52-4e65-89c9-2847eaf2f73f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.248162 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07183f7d-8e52-4e65-89c9-2847eaf2f73f-kube-api-access-6wmg7" (OuterVolumeSpecName: "kube-api-access-6wmg7") pod "07183f7d-8e52-4e65-89c9-2847eaf2f73f" (UID: "07183f7d-8e52-4e65-89c9-2847eaf2f73f"). InnerVolumeSpecName "kube-api-access-6wmg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.337948 4745 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-client-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.338016 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wmg7\" (UniqueName: \"kubernetes.io/projected/07183f7d-8e52-4e65-89c9-2847eaf2f73f-kube-api-access-6wmg7\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.338031 4745 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/07183f7d-8e52-4e65-89c9-2847eaf2f73f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.338042 4745 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.338054 4745 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07183f7d-8e52-4e65-89c9-2847eaf2f73f-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.687894 4745 generic.go:334] "Generic (PLEG): container finished" podID="07183f7d-8e52-4e65-89c9-2847eaf2f73f" containerID="25e9312beb78a3c242ebaafcd487e8edcf9d84dfc5c2cb18a48740969b367f04" exitCode=0 Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.687954 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.687967 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" event={"ID":"07183f7d-8e52-4e65-89c9-2847eaf2f73f","Type":"ContainerDied","Data":"25e9312beb78a3c242ebaafcd487e8edcf9d84dfc5c2cb18a48740969b367f04"} Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.688402 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-794995848b-jmcwd" event={"ID":"07183f7d-8e52-4e65-89c9-2847eaf2f73f","Type":"ContainerDied","Data":"93ce8c34034ef7e53ac15a34211e3ded12161a88b34ee1ab6552c5ae467ab796"} Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.688426 4745 scope.go:117] "RemoveContainer" containerID="25e9312beb78a3c242ebaafcd487e8edcf9d84dfc5c2cb18a48740969b367f04" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.703258 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz"] Dec 08 00:13:41 crc kubenswrapper[4745]: E1208 00:13:41.703492 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07183f7d-8e52-4e65-89c9-2847eaf2f73f" containerName="controller-manager" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.703510 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="07183f7d-8e52-4e65-89c9-2847eaf2f73f" containerName="controller-manager" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.703605 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="07183f7d-8e52-4e65-89c9-2847eaf2f73f" containerName="controller-manager" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.703971 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.707412 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.707939 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.708076 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.710109 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.710268 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.713490 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.715436 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz"] Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.718891 4745 scope.go:117] "RemoveContainer" containerID="25e9312beb78a3c242ebaafcd487e8edcf9d84dfc5c2cb18a48740969b367f04" Dec 08 00:13:41 crc kubenswrapper[4745]: E1208 00:13:41.719373 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25e9312beb78a3c242ebaafcd487e8edcf9d84dfc5c2cb18a48740969b367f04\": container with ID starting with 25e9312beb78a3c242ebaafcd487e8edcf9d84dfc5c2cb18a48740969b367f04 not found: ID does not exist" containerID="25e9312beb78a3c242ebaafcd487e8edcf9d84dfc5c2cb18a48740969b367f04" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.719507 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25e9312beb78a3c242ebaafcd487e8edcf9d84dfc5c2cb18a48740969b367f04"} err="failed to get container status \"25e9312beb78a3c242ebaafcd487e8edcf9d84dfc5c2cb18a48740969b367f04\": rpc error: code = NotFound desc = could not find container \"25e9312beb78a3c242ebaafcd487e8edcf9d84dfc5c2cb18a48740969b367f04\": container with ID starting with 25e9312beb78a3c242ebaafcd487e8edcf9d84dfc5c2cb18a48740969b367f04 not found: ID does not exist" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.727005 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-794995848b-jmcwd"] Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.732620 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-794995848b-jmcwd"] Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.733557 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.853329 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f859f8cf-cf59-4d09-90e5-44c3d2d117a1-proxy-ca-bundles\") pod \"controller-manager-65cc68ffb8-xf7hz\" (UID: 
\"f859f8cf-cf59-4d09-90e5-44c3d2d117a1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.853473 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwbhf\" (UniqueName: \"kubernetes.io/projected/f859f8cf-cf59-4d09-90e5-44c3d2d117a1-kube-api-access-lwbhf\") pod \"controller-manager-65cc68ffb8-xf7hz\" (UID: \"f859f8cf-cf59-4d09-90e5-44c3d2d117a1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.853517 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f859f8cf-cf59-4d09-90e5-44c3d2d117a1-serving-cert\") pod \"controller-manager-65cc68ffb8-xf7hz\" (UID: \"f859f8cf-cf59-4d09-90e5-44c3d2d117a1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.853565 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f859f8cf-cf59-4d09-90e5-44c3d2d117a1-config\") pod \"controller-manager-65cc68ffb8-xf7hz\" (UID: \"f859f8cf-cf59-4d09-90e5-44c3d2d117a1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.853679 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f859f8cf-cf59-4d09-90e5-44c3d2d117a1-client-ca\") pod \"controller-manager-65cc68ffb8-xf7hz\" (UID: \"f859f8cf-cf59-4d09-90e5-44c3d2d117a1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.954574 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwbhf\" (UniqueName: \"kubernetes.io/projected/f859f8cf-cf59-4d09-90e5-44c3d2d117a1-kube-api-access-lwbhf\") pod \"controller-manager-65cc68ffb8-xf7hz\" (UID: \"f859f8cf-cf59-4d09-90e5-44c3d2d117a1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.954636 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f859f8cf-cf59-4d09-90e5-44c3d2d117a1-serving-cert\") pod \"controller-manager-65cc68ffb8-xf7hz\" (UID: \"f859f8cf-cf59-4d09-90e5-44c3d2d117a1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.954657 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f859f8cf-cf59-4d09-90e5-44c3d2d117a1-config\") pod \"controller-manager-65cc68ffb8-xf7hz\" (UID: \"f859f8cf-cf59-4d09-90e5-44c3d2d117a1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.954678 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f859f8cf-cf59-4d09-90e5-44c3d2d117a1-client-ca\") pod \"controller-manager-65cc68ffb8-xf7hz\" (UID: \"f859f8cf-cf59-4d09-90e5-44c3d2d117a1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:41 crc 
kubenswrapper[4745]: I1208 00:13:41.954727 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f859f8cf-cf59-4d09-90e5-44c3d2d117a1-proxy-ca-bundles\") pod \"controller-manager-65cc68ffb8-xf7hz\" (UID: \"f859f8cf-cf59-4d09-90e5-44c3d2d117a1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.956169 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f859f8cf-cf59-4d09-90e5-44c3d2d117a1-client-ca\") pod \"controller-manager-65cc68ffb8-xf7hz\" (UID: \"f859f8cf-cf59-4d09-90e5-44c3d2d117a1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.957124 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f859f8cf-cf59-4d09-90e5-44c3d2d117a1-config\") pod \"controller-manager-65cc68ffb8-xf7hz\" (UID: \"f859f8cf-cf59-4d09-90e5-44c3d2d117a1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.957166 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f859f8cf-cf59-4d09-90e5-44c3d2d117a1-proxy-ca-bundles\") pod \"controller-manager-65cc68ffb8-xf7hz\" (UID: \"f859f8cf-cf59-4d09-90e5-44c3d2d117a1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.959711 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f859f8cf-cf59-4d09-90e5-44c3d2d117a1-serving-cert\") pod \"controller-manager-65cc68ffb8-xf7hz\" (UID: \"f859f8cf-cf59-4d09-90e5-44c3d2d117a1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:41 crc kubenswrapper[4745]: I1208 00:13:41.976159 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwbhf\" (UniqueName: \"kubernetes.io/projected/f859f8cf-cf59-4d09-90e5-44c3d2d117a1-kube-api-access-lwbhf\") pod \"controller-manager-65cc68ffb8-xf7hz\" (UID: \"f859f8cf-cf59-4d09-90e5-44c3d2d117a1\") " pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:42 crc kubenswrapper[4745]: I1208 00:13:42.031766 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:42 crc kubenswrapper[4745]: I1208 00:13:42.695290 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xlt2t" event={"ID":"e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6","Type":"ContainerStarted","Data":"7824b023b9ad6d95a0ce674399e673f2a177e20e32850aa8ec68aa013aa1d656"} Dec 08 00:13:42 crc kubenswrapper[4745]: I1208 00:13:42.715269 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xlt2t" podStartSLOduration=2.663408068 podStartE2EDuration="4.715247758s" podCreationTimestamp="2025-12-08 00:13:38 +0000 UTC" firstStartedPulling="2025-12-08 00:13:39.667021473 +0000 UTC m=+375.096227773" lastFinishedPulling="2025-12-08 00:13:41.718861163 +0000 UTC m=+377.148067463" observedRunningTime="2025-12-08 00:13:42.711765545 +0000 UTC m=+378.140971865" watchObservedRunningTime="2025-12-08 00:13:42.715247758 +0000 UTC m=+378.144454058" Dec 08 00:13:42 crc kubenswrapper[4745]: I1208 00:13:42.889076 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07183f7d-8e52-4e65-89c9-2847eaf2f73f" path="/var/lib/kubelet/pods/07183f7d-8e52-4e65-89c9-2847eaf2f73f/volumes" Dec 08 00:13:43 crc kubenswrapper[4745]: I1208 00:13:43.446767 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz"] Dec 08 00:13:43 crc kubenswrapper[4745]: W1208 00:13:43.451983 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf859f8cf_cf59_4d09_90e5_44c3d2d117a1.slice/crio-58e8b6c3e263cf683391577b7519031db4ab74370dbefa7baa2cc9c0b28cd184 WatchSource:0}: Error finding container 58e8b6c3e263cf683391577b7519031db4ab74370dbefa7baa2cc9c0b28cd184: Status 404 returned error can't find the container with id 58e8b6c3e263cf683391577b7519031db4ab74370dbefa7baa2cc9c0b28cd184 Dec 08 00:13:43 crc kubenswrapper[4745]: I1208 00:13:43.702887 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" event={"ID":"f859f8cf-cf59-4d09-90e5-44c3d2d117a1","Type":"ContainerStarted","Data":"982041da5a105bdee6dc59018599c981a1b5d90ae7391b359f6f6ef9fbacd2a6"} Dec 08 00:13:43 crc kubenswrapper[4745]: I1208 00:13:43.702952 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:43 crc kubenswrapper[4745]: I1208 00:13:43.702963 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" event={"ID":"f859f8cf-cf59-4d09-90e5-44c3d2d117a1","Type":"ContainerStarted","Data":"58e8b6c3e263cf683391577b7519031db4ab74370dbefa7baa2cc9c0b28cd184"} Dec 08 00:13:43 crc kubenswrapper[4745]: I1208 00:13:43.704885 4745 generic.go:334] "Generic (PLEG): container finished" podID="09f4c527-282b-4f8e-98ff-3e3032ab2896" containerID="9f81aa358b7f64592ca99b9f284fdd8fac13caf0a7aeb0a3d350f7dee76f73ea" exitCode=0 Dec 08 00:13:43 crc kubenswrapper[4745]: I1208 00:13:43.705008 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-64bk6" event={"ID":"09f4c527-282b-4f8e-98ff-3e3032ab2896","Type":"ContainerDied","Data":"9f81aa358b7f64592ca99b9f284fdd8fac13caf0a7aeb0a3d350f7dee76f73ea"} Dec 08 00:13:43 crc kubenswrapper[4745]: I1208 00:13:43.707324 
4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" Dec 08 00:13:43 crc kubenswrapper[4745]: I1208 00:13:43.726520 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-65cc68ffb8-xf7hz" podStartSLOduration=3.726504534 podStartE2EDuration="3.726504534s" podCreationTimestamp="2025-12-08 00:13:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:13:43.722753623 +0000 UTC m=+379.151959923" watchObservedRunningTime="2025-12-08 00:13:43.726504534 +0000 UTC m=+379.155710834" Dec 08 00:13:46 crc kubenswrapper[4745]: I1208 00:13:46.506840 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4x96l" Dec 08 00:13:46 crc kubenswrapper[4745]: I1208 00:13:46.507140 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4x96l" Dec 08 00:13:46 crc kubenswrapper[4745]: I1208 00:13:46.550414 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4x96l" Dec 08 00:13:46 crc kubenswrapper[4745]: I1208 00:13:46.713240 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-z697b" Dec 08 00:13:46 crc kubenswrapper[4745]: I1208 00:13:46.713566 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-z697b" Dec 08 00:13:46 crc kubenswrapper[4745]: I1208 00:13:46.721836 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-64bk6" event={"ID":"09f4c527-282b-4f8e-98ff-3e3032ab2896","Type":"ContainerStarted","Data":"3cc814b67b16f8e3acbf4d5f8014c3c387b6cd93cc42a5082762509be99b1753"} Dec 08 00:13:46 crc kubenswrapper[4745]: I1208 00:13:46.753332 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-z697b" Dec 08 00:13:46 crc kubenswrapper[4745]: I1208 00:13:46.756790 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4x96l" Dec 08 00:13:47 crc kubenswrapper[4745]: I1208 00:13:47.761387 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-64bk6" podStartSLOduration=5.464428146 podStartE2EDuration="9.761362884s" podCreationTimestamp="2025-12-08 00:13:38 +0000 UTC" firstStartedPulling="2025-12-08 00:13:40.678905946 +0000 UTC m=+376.108112236" lastFinishedPulling="2025-12-08 00:13:44.975840674 +0000 UTC m=+380.405046974" observedRunningTime="2025-12-08 00:13:47.755345586 +0000 UTC m=+383.184551906" watchObservedRunningTime="2025-12-08 00:13:47.761362884 +0000 UTC m=+383.190569184" Dec 08 00:13:47 crc kubenswrapper[4745]: I1208 00:13:47.773095 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-z697b" Dec 08 00:13:48 crc kubenswrapper[4745]: I1208 00:13:48.894518 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xlt2t" Dec 08 00:13:48 crc kubenswrapper[4745]: I1208 00:13:48.894706 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/community-operators-xlt2t" Dec 08 00:13:48 crc kubenswrapper[4745]: I1208 00:13:48.934842 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xlt2t" Dec 08 00:13:49 crc kubenswrapper[4745]: I1208 00:13:49.092496 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:13:49 crc kubenswrapper[4745]: I1208 00:13:49.092635 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:13:49 crc kubenswrapper[4745]: I1208 00:13:49.159838 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:13:49 crc kubenswrapper[4745]: I1208 00:13:49.813812 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xlt2t" Dec 08 00:13:50 crc kubenswrapper[4745]: I1208 00:13:50.829369 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:13:51 crc kubenswrapper[4745]: I1208 00:13:51.417485 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-r57vv" Dec 08 00:13:51 crc kubenswrapper[4745]: I1208 00:13:51.538835 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l6mg7"] Dec 08 00:13:52 crc kubenswrapper[4745]: I1208 00:13:52.460557 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:13:52 crc kubenswrapper[4745]: I1208 00:13:52.461107 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:13:52 crc kubenswrapper[4745]: I1208 00:13:52.461181 4745 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:13:52 crc kubenswrapper[4745]: I1208 00:13:52.462091 4745 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f8980071a87957e128867c229c023064db213f93114f668042757c6e5b0c70b0"} pod="openshift-machine-config-operator/machine-config-daemon-6czdv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 00:13:52 crc kubenswrapper[4745]: I1208 00:13:52.462210 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" containerID="cri-o://f8980071a87957e128867c229c023064db213f93114f668042757c6e5b0c70b0" gracePeriod=600 Dec 08 00:13:53 crc kubenswrapper[4745]: I1208 00:13:53.769266 4745 generic.go:334] "Generic (PLEG): container finished" podID="44f083ce-ad64-45d5-971c-eca93c5bddd6" 
containerID="f8980071a87957e128867c229c023064db213f93114f668042757c6e5b0c70b0" exitCode=0 Dec 08 00:13:53 crc kubenswrapper[4745]: I1208 00:13:53.769348 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerDied","Data":"f8980071a87957e128867c229c023064db213f93114f668042757c6e5b0c70b0"} Dec 08 00:13:53 crc kubenswrapper[4745]: I1208 00:13:53.769473 4745 scope.go:117] "RemoveContainer" containerID="1507c2a8cb581ae65a03ed22332d7e70208dd833e70dcb0aacfe6fd7d8c7356e" Dec 08 00:13:55 crc kubenswrapper[4745]: I1208 00:13:55.784734 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerStarted","Data":"d8c0de7b78040ece60c672aa32dba210e3b388298ab73c3146737e606588c30f"} Dec 08 00:14:16 crc kubenswrapper[4745]: I1208 00:14:16.578521 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" podUID="1c9c3543-96ff-4a6f-9499-95bd43aa7368" containerName="registry" containerID="cri-o://82f83256dd32d6fb3d1078e9213ffc05a6a52d25a4c48b639fede29f66d087a2" gracePeriod=30 Dec 08 00:14:16 crc kubenswrapper[4745]: I1208 00:14:16.919485 4745 generic.go:334] "Generic (PLEG): container finished" podID="1c9c3543-96ff-4a6f-9499-95bd43aa7368" containerID="82f83256dd32d6fb3d1078e9213ffc05a6a52d25a4c48b639fede29f66d087a2" exitCode=0 Dec 08 00:14:16 crc kubenswrapper[4745]: I1208 00:14:16.919530 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" event={"ID":"1c9c3543-96ff-4a6f-9499-95bd43aa7368","Type":"ContainerDied","Data":"82f83256dd32d6fb3d1078e9213ffc05a6a52d25a4c48b639fede29f66d087a2"} Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.782290 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.791457 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82qq5\" (UniqueName: \"kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-kube-api-access-82qq5\") pod \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.791521 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-registry-tls\") pod \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.791555 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/1c9c3543-96ff-4a6f-9499-95bd43aa7368-registry-certificates\") pod \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.791636 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1c9c3543-96ff-4a6f-9499-95bd43aa7368-ca-trust-extracted\") pod \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.791782 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.791809 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1c9c3543-96ff-4a6f-9499-95bd43aa7368-trusted-ca\") pod \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.791837 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-bound-sa-token\") pod \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.791867 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1c9c3543-96ff-4a6f-9499-95bd43aa7368-installation-pull-secrets\") pod \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\" (UID: \"1c9c3543-96ff-4a6f-9499-95bd43aa7368\") " Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.792625 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c9c3543-96ff-4a6f-9499-95bd43aa7368-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "1c9c3543-96ff-4a6f-9499-95bd43aa7368" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.792676 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c9c3543-96ff-4a6f-9499-95bd43aa7368-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "1c9c3543-96ff-4a6f-9499-95bd43aa7368" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.793244 4745 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/1c9c3543-96ff-4a6f-9499-95bd43aa7368-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.793270 4745 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1c9c3543-96ff-4a6f-9499-95bd43aa7368-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.797753 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "1c9c3543-96ff-4a6f-9499-95bd43aa7368" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.798003 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "1c9c3543-96ff-4a6f-9499-95bd43aa7368" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.799239 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-kube-api-access-82qq5" (OuterVolumeSpecName: "kube-api-access-82qq5") pod "1c9c3543-96ff-4a6f-9499-95bd43aa7368" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368"). InnerVolumeSpecName "kube-api-access-82qq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.802077 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "1c9c3543-96ff-4a6f-9499-95bd43aa7368" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.802194 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c9c3543-96ff-4a6f-9499-95bd43aa7368-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "1c9c3543-96ff-4a6f-9499-95bd43aa7368" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.816033 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c9c3543-96ff-4a6f-9499-95bd43aa7368-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "1c9c3543-96ff-4a6f-9499-95bd43aa7368" (UID: "1c9c3543-96ff-4a6f-9499-95bd43aa7368"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.894644 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82qq5\" (UniqueName: \"kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-kube-api-access-82qq5\") on node \"crc\" DevicePath \"\"" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.894696 4745 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.894715 4745 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1c9c3543-96ff-4a6f-9499-95bd43aa7368-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.894732 4745 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1c9c3543-96ff-4a6f-9499-95bd43aa7368-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.894749 4745 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1c9c3543-96ff-4a6f-9499-95bd43aa7368-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.928732 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" event={"ID":"1c9c3543-96ff-4a6f-9499-95bd43aa7368","Type":"ContainerDied","Data":"ad8677310aabdbadb468a5c190aa1005302a2e8b321b5f161441e0ef2d702c5e"} Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.928781 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l6mg7" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.929152 4745 scope.go:117] "RemoveContainer" containerID="82f83256dd32d6fb3d1078e9213ffc05a6a52d25a4c48b639fede29f66d087a2" Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.969034 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l6mg7"] Dec 08 00:14:17 crc kubenswrapper[4745]: I1208 00:14:17.975325 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l6mg7"] Dec 08 00:14:18 crc kubenswrapper[4745]: I1208 00:14:18.894854 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c9c3543-96ff-4a6f-9499-95bd43aa7368" path="/var/lib/kubelet/pods/1c9c3543-96ff-4a6f-9499-95bd43aa7368/volumes" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.166869 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55"] Dec 08 00:15:00 crc kubenswrapper[4745]: E1208 00:15:00.167526 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c9c3543-96ff-4a6f-9499-95bd43aa7368" containerName="registry" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.167538 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c9c3543-96ff-4a6f-9499-95bd43aa7368" containerName="registry" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.167630 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c9c3543-96ff-4a6f-9499-95bd43aa7368" containerName="registry" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.168001 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.170077 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.174139 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.179352 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55"] Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.288449 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz9q7\" (UniqueName: \"kubernetes.io/projected/9c2837bc-3518-471e-aa16-b1d2373ecb68-kube-api-access-bz9q7\") pod \"collect-profiles-29419215-j4c55\" (UID: \"9c2837bc-3518-471e-aa16-b1d2373ecb68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.288503 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c2837bc-3518-471e-aa16-b1d2373ecb68-config-volume\") pod \"collect-profiles-29419215-j4c55\" (UID: \"9c2837bc-3518-471e-aa16-b1d2373ecb68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.288557 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/9c2837bc-3518-471e-aa16-b1d2373ecb68-secret-volume\") pod \"collect-profiles-29419215-j4c55\" (UID: \"9c2837bc-3518-471e-aa16-b1d2373ecb68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.389809 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c2837bc-3518-471e-aa16-b1d2373ecb68-secret-volume\") pod \"collect-profiles-29419215-j4c55\" (UID: \"9c2837bc-3518-471e-aa16-b1d2373ecb68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.389962 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz9q7\" (UniqueName: \"kubernetes.io/projected/9c2837bc-3518-471e-aa16-b1d2373ecb68-kube-api-access-bz9q7\") pod \"collect-profiles-29419215-j4c55\" (UID: \"9c2837bc-3518-471e-aa16-b1d2373ecb68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.390030 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c2837bc-3518-471e-aa16-b1d2373ecb68-config-volume\") pod \"collect-profiles-29419215-j4c55\" (UID: \"9c2837bc-3518-471e-aa16-b1d2373ecb68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.391716 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c2837bc-3518-471e-aa16-b1d2373ecb68-config-volume\") pod \"collect-profiles-29419215-j4c55\" (UID: \"9c2837bc-3518-471e-aa16-b1d2373ecb68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.396863 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c2837bc-3518-471e-aa16-b1d2373ecb68-secret-volume\") pod \"collect-profiles-29419215-j4c55\" (UID: \"9c2837bc-3518-471e-aa16-b1d2373ecb68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.414065 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bz9q7\" (UniqueName: \"kubernetes.io/projected/9c2837bc-3518-471e-aa16-b1d2373ecb68-kube-api-access-bz9q7\") pod \"collect-profiles-29419215-j4c55\" (UID: \"9c2837bc-3518-471e-aa16-b1d2373ecb68\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.488426 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55" Dec 08 00:15:00 crc kubenswrapper[4745]: I1208 00:15:00.897675 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55"] Dec 08 00:15:00 crc kubenswrapper[4745]: W1208 00:15:00.910238 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c2837bc_3518_471e_aa16_b1d2373ecb68.slice/crio-cdd0d0b72a9531352a74f11efa93d206f95e4e97bd415f27706486c711c4f6d3 WatchSource:0}: Error finding container cdd0d0b72a9531352a74f11efa93d206f95e4e97bd415f27706486c711c4f6d3: Status 404 returned error can't find the container with id cdd0d0b72a9531352a74f11efa93d206f95e4e97bd415f27706486c711c4f6d3 Dec 08 00:15:01 crc kubenswrapper[4745]: I1208 00:15:01.190690 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55" event={"ID":"9c2837bc-3518-471e-aa16-b1d2373ecb68","Type":"ContainerStarted","Data":"cdd0d0b72a9531352a74f11efa93d206f95e4e97bd415f27706486c711c4f6d3"} Dec 08 00:15:02 crc kubenswrapper[4745]: I1208 00:15:02.197845 4745 generic.go:334] "Generic (PLEG): container finished" podID="9c2837bc-3518-471e-aa16-b1d2373ecb68" containerID="b26cea8c65900a3c6daedaca28ce49ef65cc8dcb0fa74f32f74edc1355f59e1e" exitCode=0 Dec 08 00:15:02 crc kubenswrapper[4745]: I1208 00:15:02.197915 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55" event={"ID":"9c2837bc-3518-471e-aa16-b1d2373ecb68","Type":"ContainerDied","Data":"b26cea8c65900a3c6daedaca28ce49ef65cc8dcb0fa74f32f74edc1355f59e1e"} Dec 08 00:15:03 crc kubenswrapper[4745]: I1208 00:15:03.497043 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55" Dec 08 00:15:03 crc kubenswrapper[4745]: I1208 00:15:03.629449 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bz9q7\" (UniqueName: \"kubernetes.io/projected/9c2837bc-3518-471e-aa16-b1d2373ecb68-kube-api-access-bz9q7\") pod \"9c2837bc-3518-471e-aa16-b1d2373ecb68\" (UID: \"9c2837bc-3518-471e-aa16-b1d2373ecb68\") " Dec 08 00:15:03 crc kubenswrapper[4745]: I1208 00:15:03.629629 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c2837bc-3518-471e-aa16-b1d2373ecb68-secret-volume\") pod \"9c2837bc-3518-471e-aa16-b1d2373ecb68\" (UID: \"9c2837bc-3518-471e-aa16-b1d2373ecb68\") " Dec 08 00:15:03 crc kubenswrapper[4745]: I1208 00:15:03.629687 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c2837bc-3518-471e-aa16-b1d2373ecb68-config-volume\") pod \"9c2837bc-3518-471e-aa16-b1d2373ecb68\" (UID: \"9c2837bc-3518-471e-aa16-b1d2373ecb68\") " Dec 08 00:15:03 crc kubenswrapper[4745]: I1208 00:15:03.630630 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c2837bc-3518-471e-aa16-b1d2373ecb68-config-volume" (OuterVolumeSpecName: "config-volume") pod "9c2837bc-3518-471e-aa16-b1d2373ecb68" (UID: "9c2837bc-3518-471e-aa16-b1d2373ecb68"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:15:03 crc kubenswrapper[4745]: I1208 00:15:03.634488 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c2837bc-3518-471e-aa16-b1d2373ecb68-kube-api-access-bz9q7" (OuterVolumeSpecName: "kube-api-access-bz9q7") pod "9c2837bc-3518-471e-aa16-b1d2373ecb68" (UID: "9c2837bc-3518-471e-aa16-b1d2373ecb68"). InnerVolumeSpecName "kube-api-access-bz9q7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:15:03 crc kubenswrapper[4745]: I1208 00:15:03.634618 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c2837bc-3518-471e-aa16-b1d2373ecb68-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9c2837bc-3518-471e-aa16-b1d2373ecb68" (UID: "9c2837bc-3518-471e-aa16-b1d2373ecb68"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:15:03 crc kubenswrapper[4745]: I1208 00:15:03.731047 4745 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9c2837bc-3518-471e-aa16-b1d2373ecb68-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 08 00:15:03 crc kubenswrapper[4745]: I1208 00:15:03.731095 4745 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9c2837bc-3518-471e-aa16-b1d2373ecb68-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 00:15:03 crc kubenswrapper[4745]: I1208 00:15:03.731109 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bz9q7\" (UniqueName: \"kubernetes.io/projected/9c2837bc-3518-471e-aa16-b1d2373ecb68-kube-api-access-bz9q7\") on node \"crc\" DevicePath \"\"" Dec 08 00:15:04 crc kubenswrapper[4745]: I1208 00:15:04.217001 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55" event={"ID":"9c2837bc-3518-471e-aa16-b1d2373ecb68","Type":"ContainerDied","Data":"cdd0d0b72a9531352a74f11efa93d206f95e4e97bd415f27706486c711c4f6d3"} Dec 08 00:15:04 crc kubenswrapper[4745]: I1208 00:15:04.217387 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cdd0d0b72a9531352a74f11efa93d206f95e4e97bd415f27706486c711c4f6d3" Dec 08 00:15:04 crc kubenswrapper[4745]: I1208 00:15:04.217061 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29419215-j4c55" Dec 08 00:16:22 crc kubenswrapper[4745]: I1208 00:16:22.460653 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:16:22 crc kubenswrapper[4745]: I1208 00:16:22.461298 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:16:52 crc kubenswrapper[4745]: I1208 00:16:52.460353 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:16:52 crc kubenswrapper[4745]: I1208 00:16:52.460968 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:17:22 crc kubenswrapper[4745]: I1208 00:17:22.461314 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:17:22 crc kubenswrapper[4745]: I1208 00:17:22.462209 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:17:22 crc kubenswrapper[4745]: I1208 00:17:22.462287 4745 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:17:22 crc kubenswrapper[4745]: I1208 00:17:22.463152 4745 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d8c0de7b78040ece60c672aa32dba210e3b388298ab73c3146737e606588c30f"} pod="openshift-machine-config-operator/machine-config-daemon-6czdv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 00:17:22 crc kubenswrapper[4745]: I1208 00:17:22.463255 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" containerID="cri-o://d8c0de7b78040ece60c672aa32dba210e3b388298ab73c3146737e606588c30f" gracePeriod=600 Dec 08 00:17:23 crc kubenswrapper[4745]: I1208 00:17:23.105743 4745 generic.go:334] "Generic (PLEG): container finished" podID="44f083ce-ad64-45d5-971c-eca93c5bddd6" 
containerID="d8c0de7b78040ece60c672aa32dba210e3b388298ab73c3146737e606588c30f" exitCode=0 Dec 08 00:17:23 crc kubenswrapper[4745]: I1208 00:17:23.105820 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerDied","Data":"d8c0de7b78040ece60c672aa32dba210e3b388298ab73c3146737e606588c30f"} Dec 08 00:17:23 crc kubenswrapper[4745]: I1208 00:17:23.106144 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerStarted","Data":"d68e50e68d0ae8a6e03f26cfdb8cf98d132e8e3ab3e913de8377758729efd13e"} Dec 08 00:17:23 crc kubenswrapper[4745]: I1208 00:17:23.106174 4745 scope.go:117] "RemoveContainer" containerID="f8980071a87957e128867c229c023064db213f93114f668042757c6e5b0c70b0" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.513139 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5c9xn"] Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.514145 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovn-controller" containerID="cri-o://d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c" gracePeriod=30 Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.514545 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="sbdb" containerID="cri-o://ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24" gracePeriod=30 Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.514597 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="nbdb" containerID="cri-o://cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9" gracePeriod=30 Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.514643 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="northd" containerID="cri-o://5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4" gracePeriod=30 Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.514677 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b" gracePeriod=30 Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.514719 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="kube-rbac-proxy-node" containerID="cri-o://16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59" gracePeriod=30 Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.514759 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" 
containerName="ovn-acl-logging" containerID="cri-o://5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079" gracePeriod=30 Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.562668 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovnkube-controller" containerID="cri-o://ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac" gracePeriod=30 Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.772784 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovnkube-controller/3.log" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.780946 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovn-acl-logging/0.log" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.783056 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovn-controller/0.log" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.783846 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.838529 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-drwqf"] Dec 08 00:18:05 crc kubenswrapper[4745]: E1208 00:18:05.838769 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="kube-rbac-proxy-ovn-metrics" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.838783 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="kube-rbac-proxy-ovn-metrics" Dec 08 00:18:05 crc kubenswrapper[4745]: E1208 00:18:05.838797 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovnkube-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.838807 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovnkube-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: E1208 00:18:05.838818 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="kube-rbac-proxy-node" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.838827 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="kube-rbac-proxy-node" Dec 08 00:18:05 crc kubenswrapper[4745]: E1208 00:18:05.838836 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="sbdb" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.838844 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="sbdb" Dec 08 00:18:05 crc kubenswrapper[4745]: E1208 00:18:05.838855 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovnkube-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.838863 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" 
containerName="ovnkube-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: E1208 00:18:05.838878 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovnkube-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.838886 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovnkube-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: E1208 00:18:05.838901 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="nbdb" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.838909 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="nbdb" Dec 08 00:18:05 crc kubenswrapper[4745]: E1208 00:18:05.838919 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="northd" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.838944 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="northd" Dec 08 00:18:05 crc kubenswrapper[4745]: E1208 00:18:05.838958 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c2837bc-3518-471e-aa16-b1d2373ecb68" containerName="collect-profiles" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.838966 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c2837bc-3518-471e-aa16-b1d2373ecb68" containerName="collect-profiles" Dec 08 00:18:05 crc kubenswrapper[4745]: E1208 00:18:05.838975 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovn-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.838985 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovn-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: E1208 00:18:05.838995 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovn-acl-logging" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839003 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovn-acl-logging" Dec 08 00:18:05 crc kubenswrapper[4745]: E1208 00:18:05.839014 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="kubecfg-setup" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839022 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="kubecfg-setup" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839164 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="sbdb" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839179 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovnkube-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839187 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovnkube-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839199 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" 
containerName="ovn-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839208 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="nbdb" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839219 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovnkube-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839228 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="kube-rbac-proxy-ovn-metrics" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839238 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovnkube-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839251 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c2837bc-3518-471e-aa16-b1d2373ecb68" containerName="collect-profiles" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839262 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="northd" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839272 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="kube-rbac-proxy-node" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839282 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovn-acl-logging" Dec 08 00:18:05 crc kubenswrapper[4745]: E1208 00:18:05.839401 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovnkube-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839411 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovnkube-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839630 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovnkube-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: E1208 00:18:05.839754 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovnkube-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.839764 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerName="ovnkube-controller" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.841739 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915051 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-systemd-units\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915099 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-node-log\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915137 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovnkube-config\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915172 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-var-lib-openvswitch\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915206 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-slash\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915234 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-systemd\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915257 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-var-lib-cni-networks-ovn-kubernetes\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915286 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovn-node-metrics-cert\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915311 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-ovn\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915337 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-run-netns\") pod 
\"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915226 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-node-log" (OuterVolumeSpecName: "node-log") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915253 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915399 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-log-socket" (OuterVolumeSpecName: "log-socket") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915244 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915302 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-slash" (OuterVolumeSpecName: "host-slash") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915349 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915431 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915470 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915368 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-log-socket\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915616 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-kubelet\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915652 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-cni-bin\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915702 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-etc-openvswitch\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915738 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-env-overrides\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915741 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915780 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915780 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-run-ovn-kubernetes\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915814 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915817 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-cni-netd\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915839 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915863 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915864 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-openvswitch\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915896 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915901 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xb9zl\" (UniqueName: \"kubernetes.io/projected/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-kube-api-access-xb9zl\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915978 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovnkube-script-lib\") pod \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\" (UID: \"1fc4e04e-a6e2-4897-9549-d7517e1ac92b\") " Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.916485 4745 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.916507 4745 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-slash\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.916527 4745 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.916548 4745 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.916567 4745 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.916584 4745 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-log-socket\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.916601 4745 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.916619 4745 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.916638 4745 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.916656 4745 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 
00:18:05.916674 4745 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.916691 4745 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.916709 4745 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-node-log\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.916726 4745 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.915977 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.916303 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.916539 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.920746 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.922067 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-kube-api-access-xb9zl" (OuterVolumeSpecName: "kube-api-access-xb9zl") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "kube-api-access-xb9zl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:18:05 crc kubenswrapper[4745]: I1208 00:18:05.928883 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "1fc4e04e-a6e2-4897-9549-d7517e1ac92b" (UID: "1fc4e04e-a6e2-4897-9549-d7517e1ac92b"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.017584 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-cni-netd\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.017649 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-kubelet\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.017672 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgnt9\" (UniqueName: \"kubernetes.io/projected/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-kube-api-access-hgnt9\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.017701 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-systemd-units\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.017720 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-run-openvswitch\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.017765 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-run-ovn-kubernetes\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.017786 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-cni-bin\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.017879 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-ovn-node-metrics-cert\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.017979 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-env-overrides\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018030 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018127 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-run-systemd\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018186 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-run-ovn\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018229 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-ovnkube-script-lib\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018268 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-slash\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018348 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-ovnkube-config\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018396 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-node-log\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018434 4745 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-etc-openvswitch\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018488 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-var-lib-openvswitch\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018520 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-run-netns\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018549 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-log-socket\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018620 4745 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018643 4745 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018666 4745 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018684 4745 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018705 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xb9zl\" (UniqueName: \"kubernetes.io/projected/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-kube-api-access-xb9zl\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.018724 4745 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1fc4e04e-a6e2-4897-9549-d7517e1ac92b-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120056 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-slash\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc 
kubenswrapper[4745]: I1208 00:18:06.120138 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-ovnkube-config\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120212 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-node-log\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120217 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-slash\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120263 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-etc-openvswitch\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120344 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-etc-openvswitch\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120380 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-var-lib-openvswitch\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120403 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-node-log\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120450 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-run-netns\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120483 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-log-socket\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120508 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-log-socket\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120481 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-var-lib-openvswitch\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120524 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-cni-netd\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120540 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-run-netns\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120549 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-cni-netd\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120578 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-kubelet\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120622 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-kubelet\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120645 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgnt9\" (UniqueName: \"kubernetes.io/projected/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-kube-api-access-hgnt9\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120707 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-systemd-units\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120752 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-run-openvswitch\") pod \"ovnkube-node-drwqf\" (UID: 
\"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120826 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-run-ovn-kubernetes\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120842 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-systemd-units\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120873 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-cni-bin\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120893 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-run-openvswitch\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120918 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-run-ovn-kubernetes\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120972 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-ovn-node-metrics-cert\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.120998 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-cni-bin\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.121186 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-env-overrides\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.121261 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") 
" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.121312 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-run-systemd\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.121348 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-run-ovn\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.121393 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-ovnkube-script-lib\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.121397 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.121405 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-run-systemd\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.121564 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-run-ovn\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.122090 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-ovnkube-config\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.122215 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-ovnkube-script-lib\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.122339 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-env-overrides\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.125966 4745 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-ovn-node-metrics-cert\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.153892 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgnt9\" (UniqueName: \"kubernetes.io/projected/c45e1e65-f1e2-4c6c-8a88-a0ca897aa400-kube-api-access-hgnt9\") pod \"ovnkube-node-drwqf\" (UID: \"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400\") " pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.155525 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.393234 4745 generic.go:334] "Generic (PLEG): container finished" podID="c45e1e65-f1e2-4c6c-8a88-a0ca897aa400" containerID="929a80cb4fc58dd79a268ba61665569645a568d23218dc0d9d43d823afb5e961" exitCode=0 Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.393281 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" event={"ID":"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400","Type":"ContainerDied","Data":"929a80cb4fc58dd79a268ba61665569645a568d23218dc0d9d43d823afb5e961"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.393345 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" event={"ID":"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400","Type":"ContainerStarted","Data":"cbd9e3e0c7ba8c878913ae8ffbd0905312b423c2f47609584bfbc0a69b8b2510"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.400567 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pk459_73d47ce8-04b5-4dba-aa14-655581a103a8/kube-multus/2.log" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.401082 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pk459_73d47ce8-04b5-4dba-aa14-655581a103a8/kube-multus/1.log" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.401129 4745 generic.go:334] "Generic (PLEG): container finished" podID="73d47ce8-04b5-4dba-aa14-655581a103a8" containerID="84ff2184bdcecf225a8ccfd25d0b2e058a82360914b45b7dfc646209f975032f" exitCode=2 Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.401201 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pk459" event={"ID":"73d47ce8-04b5-4dba-aa14-655581a103a8","Type":"ContainerDied","Data":"84ff2184bdcecf225a8ccfd25d0b2e058a82360914b45b7dfc646209f975032f"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.401288 4745 scope.go:117] "RemoveContainer" containerID="aa379390948ecfc2220e8bb11d770d2faf0844a35bbe0684954d611d567a4a88" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.401905 4745 scope.go:117] "RemoveContainer" containerID="84ff2184bdcecf225a8ccfd25d0b2e058a82360914b45b7dfc646209f975032f" Dec 08 00:18:06 crc kubenswrapper[4745]: E1208 00:18:06.402177 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-pk459_openshift-multus(73d47ce8-04b5-4dba-aa14-655581a103a8)\"" pod="openshift-multus/multus-pk459" podUID="73d47ce8-04b5-4dba-aa14-655581a103a8" Dec 08 
00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.404441 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovnkube-controller/3.log" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.407112 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovn-acl-logging/0.log" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.407615 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5c9xn_1fc4e04e-a6e2-4897-9549-d7517e1ac92b/ovn-controller/0.log" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408092 4745 generic.go:334] "Generic (PLEG): container finished" podID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerID="ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac" exitCode=0 Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408120 4745 generic.go:334] "Generic (PLEG): container finished" podID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerID="ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24" exitCode=0 Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408163 4745 generic.go:334] "Generic (PLEG): container finished" podID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerID="cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9" exitCode=0 Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408178 4745 generic.go:334] "Generic (PLEG): container finished" podID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerID="5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4" exitCode=0 Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408190 4745 generic.go:334] "Generic (PLEG): container finished" podID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerID="b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b" exitCode=0 Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408202 4745 generic.go:334] "Generic (PLEG): container finished" podID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerID="16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59" exitCode=0 Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408248 4745 generic.go:334] "Generic (PLEG): container finished" podID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerID="5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079" exitCode=143 Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408264 4745 generic.go:334] "Generic (PLEG): container finished" podID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" containerID="d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c" exitCode=143 Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408338 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerDied","Data":"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408403 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerDied","Data":"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408430 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" 
event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerDied","Data":"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408450 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerDied","Data":"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408470 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerDied","Data":"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408489 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerDied","Data":"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408508 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408551 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408563 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408573 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408583 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408593 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408602 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408612 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408621 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408630 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408644 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerDied","Data":"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408660 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408707 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408723 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408734 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408745 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408756 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408766 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408776 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408786 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408796 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408811 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerDied","Data":"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408828 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408840 4745 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408850 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408859 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408869 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408878 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408888 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408898 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408907 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408917 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408962 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" event={"ID":"1fc4e04e-a6e2-4897-9549-d7517e1ac92b","Type":"ContainerDied","Data":"d1ddae1e19a2add4afb385b550adeb13712a3a5db0750a4985c52406d83581e3"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408981 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.408992 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.409002 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.409012 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.409021 4745 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.409031 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.409041 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.409050 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.409061 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.409071 4745 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11"} Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.409199 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5c9xn" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.427595 4745 scope.go:117] "RemoveContainer" containerID="ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.454054 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5c9xn"] Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.458505 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5c9xn"] Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.461018 4745 scope.go:117] "RemoveContainer" containerID="1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.497375 4745 scope.go:117] "RemoveContainer" containerID="ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.514688 4745 scope.go:117] "RemoveContainer" containerID="cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.529470 4745 scope.go:117] "RemoveContainer" containerID="5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.542544 4745 scope.go:117] "RemoveContainer" containerID="b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.598018 4745 scope.go:117] "RemoveContainer" containerID="16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.611000 4745 scope.go:117] "RemoveContainer" containerID="5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.685017 4745 scope.go:117] "RemoveContainer" 
containerID="d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.707733 4745 scope.go:117] "RemoveContainer" containerID="5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.728287 4745 scope.go:117] "RemoveContainer" containerID="ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac" Dec 08 00:18:06 crc kubenswrapper[4745]: E1208 00:18:06.728839 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac\": container with ID starting with ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac not found: ID does not exist" containerID="ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.728907 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac"} err="failed to get container status \"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac\": rpc error: code = NotFound desc = could not find container \"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac\": container with ID starting with ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.728959 4745 scope.go:117] "RemoveContainer" containerID="1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104" Dec 08 00:18:06 crc kubenswrapper[4745]: E1208 00:18:06.729474 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104\": container with ID starting with 1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104 not found: ID does not exist" containerID="1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.729556 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104"} err="failed to get container status \"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104\": rpc error: code = NotFound desc = could not find container \"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104\": container with ID starting with 1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.729577 4745 scope.go:117] "RemoveContainer" containerID="ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24" Dec 08 00:18:06 crc kubenswrapper[4745]: E1208 00:18:06.730176 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\": container with ID starting with ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24 not found: ID does not exist" containerID="ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.730263 4745 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24"} err="failed to get container status \"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\": rpc error: code = NotFound desc = could not find container \"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\": container with ID starting with ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.730312 4745 scope.go:117] "RemoveContainer" containerID="cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9" Dec 08 00:18:06 crc kubenswrapper[4745]: E1208 00:18:06.730703 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\": container with ID starting with cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9 not found: ID does not exist" containerID="cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.730735 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9"} err="failed to get container status \"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\": rpc error: code = NotFound desc = could not find container \"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\": container with ID starting with cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.730759 4745 scope.go:117] "RemoveContainer" containerID="5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4" Dec 08 00:18:06 crc kubenswrapper[4745]: E1208 00:18:06.731057 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\": container with ID starting with 5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4 not found: ID does not exist" containerID="5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.731088 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4"} err="failed to get container status \"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\": rpc error: code = NotFound desc = could not find container \"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\": container with ID starting with 5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.731332 4745 scope.go:117] "RemoveContainer" containerID="b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b" Dec 08 00:18:06 crc kubenswrapper[4745]: E1208 00:18:06.731604 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\": container with ID starting with b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b not found: ID does not exist" 
containerID="b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.731635 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b"} err="failed to get container status \"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\": rpc error: code = NotFound desc = could not find container \"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\": container with ID starting with b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.731656 4745 scope.go:117] "RemoveContainer" containerID="16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59" Dec 08 00:18:06 crc kubenswrapper[4745]: E1208 00:18:06.732163 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\": container with ID starting with 16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59 not found: ID does not exist" containerID="16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.732214 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59"} err="failed to get container status \"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\": rpc error: code = NotFound desc = could not find container \"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\": container with ID starting with 16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.732233 4745 scope.go:117] "RemoveContainer" containerID="5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079" Dec 08 00:18:06 crc kubenswrapper[4745]: E1208 00:18:06.733264 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\": container with ID starting with 5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079 not found: ID does not exist" containerID="5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.733343 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079"} err="failed to get container status \"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\": rpc error: code = NotFound desc = could not find container \"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\": container with ID starting with 5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.733362 4745 scope.go:117] "RemoveContainer" containerID="d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c" Dec 08 00:18:06 crc kubenswrapper[4745]: E1208 00:18:06.734025 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\": container with ID starting with d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c not found: ID does not exist" containerID="d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.734060 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c"} err="failed to get container status \"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\": rpc error: code = NotFound desc = could not find container \"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\": container with ID starting with d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.734086 4745 scope.go:117] "RemoveContainer" containerID="5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11" Dec 08 00:18:06 crc kubenswrapper[4745]: E1208 00:18:06.734421 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\": container with ID starting with 5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11 not found: ID does not exist" containerID="5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.734473 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11"} err="failed to get container status \"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\": rpc error: code = NotFound desc = could not find container \"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\": container with ID starting with 5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.734490 4745 scope.go:117] "RemoveContainer" containerID="ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.734743 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac"} err="failed to get container status \"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac\": rpc error: code = NotFound desc = could not find container \"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac\": container with ID starting with ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.734764 4745 scope.go:117] "RemoveContainer" containerID="1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.734997 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104"} err="failed to get container status \"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104\": rpc error: code = NotFound desc = could not find container \"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104\": container with ID starting with 
1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.735069 4745 scope.go:117] "RemoveContainer" containerID="ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.735302 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24"} err="failed to get container status \"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\": rpc error: code = NotFound desc = could not find container \"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\": container with ID starting with ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.735344 4745 scope.go:117] "RemoveContainer" containerID="cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.735565 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9"} err="failed to get container status \"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\": rpc error: code = NotFound desc = could not find container \"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\": container with ID starting with cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.735595 4745 scope.go:117] "RemoveContainer" containerID="5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.735906 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4"} err="failed to get container status \"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\": rpc error: code = NotFound desc = could not find container \"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\": container with ID starting with 5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.735960 4745 scope.go:117] "RemoveContainer" containerID="b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.736251 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b"} err="failed to get container status \"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\": rpc error: code = NotFound desc = could not find container \"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\": container with ID starting with b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.736284 4745 scope.go:117] "RemoveContainer" containerID="16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.736605 4745 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59"} err="failed to get container status \"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\": rpc error: code = NotFound desc = could not find container \"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\": container with ID starting with 16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.736642 4745 scope.go:117] "RemoveContainer" containerID="5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.736901 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079"} err="failed to get container status \"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\": rpc error: code = NotFound desc = could not find container \"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\": container with ID starting with 5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.736950 4745 scope.go:117] "RemoveContainer" containerID="d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.737302 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c"} err="failed to get container status \"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\": rpc error: code = NotFound desc = could not find container \"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\": container with ID starting with d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.737344 4745 scope.go:117] "RemoveContainer" containerID="5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.737582 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11"} err="failed to get container status \"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\": rpc error: code = NotFound desc = could not find container \"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\": container with ID starting with 5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.737615 4745 scope.go:117] "RemoveContainer" containerID="ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.737856 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac"} err="failed to get container status \"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac\": rpc error: code = NotFound desc = could not find container \"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac\": container with ID starting with ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac not found: ID does not exist" Dec 
08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.737900 4745 scope.go:117] "RemoveContainer" containerID="1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.738170 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104"} err="failed to get container status \"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104\": rpc error: code = NotFound desc = could not find container \"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104\": container with ID starting with 1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.738198 4745 scope.go:117] "RemoveContainer" containerID="ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.738453 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24"} err="failed to get container status \"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\": rpc error: code = NotFound desc = could not find container \"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\": container with ID starting with ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.738495 4745 scope.go:117] "RemoveContainer" containerID="cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.738729 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9"} err="failed to get container status \"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\": rpc error: code = NotFound desc = could not find container \"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\": container with ID starting with cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.738759 4745 scope.go:117] "RemoveContainer" containerID="5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.739044 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4"} err="failed to get container status \"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\": rpc error: code = NotFound desc = could not find container \"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\": container with ID starting with 5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.739084 4745 scope.go:117] "RemoveContainer" containerID="b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.739323 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b"} err="failed to get container status 
\"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\": rpc error: code = NotFound desc = could not find container \"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\": container with ID starting with b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.739354 4745 scope.go:117] "RemoveContainer" containerID="16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.739614 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59"} err="failed to get container status \"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\": rpc error: code = NotFound desc = could not find container \"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\": container with ID starting with 16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.739649 4745 scope.go:117] "RemoveContainer" containerID="5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.739870 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079"} err="failed to get container status \"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\": rpc error: code = NotFound desc = could not find container \"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\": container with ID starting with 5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.739900 4745 scope.go:117] "RemoveContainer" containerID="d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.740169 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c"} err="failed to get container status \"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\": rpc error: code = NotFound desc = could not find container \"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\": container with ID starting with d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.740188 4745 scope.go:117] "RemoveContainer" containerID="5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.740446 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11"} err="failed to get container status \"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\": rpc error: code = NotFound desc = could not find container \"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\": container with ID starting with 5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.740465 4745 scope.go:117] "RemoveContainer" 
containerID="ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.740671 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac"} err="failed to get container status \"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac\": rpc error: code = NotFound desc = could not find container \"ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac\": container with ID starting with ac0e6b21745f649e844e44d808a745c95eb54ff0e5a8c59c91e4e1b2a88e90ac not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.740690 4745 scope.go:117] "RemoveContainer" containerID="1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.740964 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104"} err="failed to get container status \"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104\": rpc error: code = NotFound desc = could not find container \"1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104\": container with ID starting with 1ec3ee581143162fdd498774ec7eeaf08972a0be93839146589def3837389104 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.740989 4745 scope.go:117] "RemoveContainer" containerID="ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.741293 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24"} err="failed to get container status \"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\": rpc error: code = NotFound desc = could not find container \"ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24\": container with ID starting with ecad241e1be6fb2c9658fc98c0c15960df642883392ca0a26f7e336782e87e24 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.741357 4745 scope.go:117] "RemoveContainer" containerID="cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.741643 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9"} err="failed to get container status \"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\": rpc error: code = NotFound desc = could not find container \"cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9\": container with ID starting with cb236b4ee7030351cd996df197fb8f61e96382e7762c743f07926ffb0cef5fa9 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.741661 4745 scope.go:117] "RemoveContainer" containerID="5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.741891 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4"} err="failed to get container status \"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\": rpc error: code = NotFound desc = could not find 
container \"5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4\": container with ID starting with 5fda569ed17af681d61a2ad99c90b7bd272caad92d4200e64cb3944aad3395e4 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.741910 4745 scope.go:117] "RemoveContainer" containerID="b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.742181 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b"} err="failed to get container status \"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\": rpc error: code = NotFound desc = could not find container \"b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b\": container with ID starting with b1d7a4f254e888f186bf5af66fc746dec0400fcf32d6fcf5470c31f96e53594b not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.742199 4745 scope.go:117] "RemoveContainer" containerID="16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.742417 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59"} err="failed to get container status \"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\": rpc error: code = NotFound desc = could not find container \"16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59\": container with ID starting with 16cab5877d47a52c314a1635b6afe12fc3de1133fa352ab4c868ba68e3587a59 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.742437 4745 scope.go:117] "RemoveContainer" containerID="5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.742704 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079"} err="failed to get container status \"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\": rpc error: code = NotFound desc = could not find container \"5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079\": container with ID starting with 5ebb11aaeea7a02a15cbdd825794fcf06096ee0068b753084a8649c288a34079 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.742759 4745 scope.go:117] "RemoveContainer" containerID="d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.743393 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c"} err="failed to get container status \"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\": rpc error: code = NotFound desc = could not find container \"d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c\": container with ID starting with d0c6f7f10d8aaf1a9bd7cd78fcf52c2806b02861b30198aefd76727f52b6e13c not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.743415 4745 scope.go:117] "RemoveContainer" containerID="5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.743606 4745 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11"} err="failed to get container status \"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\": rpc error: code = NotFound desc = could not find container \"5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11\": container with ID starting with 5602e4aac82ed8f1753452626ae01f2cdb30887384a50cbf4b5f0e509d968d11 not found: ID does not exist" Dec 08 00:18:06 crc kubenswrapper[4745]: I1208 00:18:06.901257 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fc4e04e-a6e2-4897-9549-d7517e1ac92b" path="/var/lib/kubelet/pods/1fc4e04e-a6e2-4897-9549-d7517e1ac92b/volumes" Dec 08 00:18:07 crc kubenswrapper[4745]: I1208 00:18:07.420981 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" event={"ID":"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400","Type":"ContainerStarted","Data":"2cf61d8c6a7714bcb990e770443cbe49e01949a8ae45e437fc1d3eb602d6d3a1"} Dec 08 00:18:07 crc kubenswrapper[4745]: I1208 00:18:07.421632 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" event={"ID":"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400","Type":"ContainerStarted","Data":"9f222236262a26db71a70f2a3e7823e52cfff42029aac46eceb4abdcf9fd76b5"} Dec 08 00:18:07 crc kubenswrapper[4745]: I1208 00:18:07.424166 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pk459_73d47ce8-04b5-4dba-aa14-655581a103a8/kube-multus/2.log" Dec 08 00:18:08 crc kubenswrapper[4745]: I1208 00:18:08.431644 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" event={"ID":"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400","Type":"ContainerStarted","Data":"0d38faf781dead3b1b38f22a298b000655aa98f7566222487c4e10e731dec43d"} Dec 08 00:18:08 crc kubenswrapper[4745]: I1208 00:18:08.431690 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" event={"ID":"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400","Type":"ContainerStarted","Data":"ec1f980301765249a99ffdab22828dd850f6b601579744135d5dc84dc33faa14"} Dec 08 00:18:09 crc kubenswrapper[4745]: I1208 00:18:09.443229 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" event={"ID":"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400","Type":"ContainerStarted","Data":"eb12cb8b0d478f03b0ca851fba77eeb762812f9c4dfdf9498270db169363272f"} Dec 08 00:18:09 crc kubenswrapper[4745]: I1208 00:18:09.443798 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" event={"ID":"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400","Type":"ContainerStarted","Data":"998e9c146e131c8ca6352b2fd8e9a44ab21b90c5b807e1028e676d840c153520"} Dec 08 00:18:11 crc kubenswrapper[4745]: I1208 00:18:11.458532 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" event={"ID":"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400","Type":"ContainerStarted","Data":"5f374373b202584199daa10974240499d82dfc5c613de6232682b20853acb506"} Dec 08 00:18:13 crc kubenswrapper[4745]: I1208 00:18:13.474044 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" event={"ID":"c45e1e65-f1e2-4c6c-8a88-a0ca897aa400","Type":"ContainerStarted","Data":"7af539d6dada39152ab5fd17cf61c3e1d0e0580c175b6a46a4db017b454fbd34"} Dec 08 
00:18:13 crc kubenswrapper[4745]: I1208 00:18:13.474329 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:13 crc kubenswrapper[4745]: I1208 00:18:13.474486 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:13 crc kubenswrapper[4745]: I1208 00:18:13.555806 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" podStartSLOduration=8.555787885 podStartE2EDuration="8.555787885s" podCreationTimestamp="2025-12-08 00:18:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:18:13.554319165 +0000 UTC m=+648.983525495" watchObservedRunningTime="2025-12-08 00:18:13.555787885 +0000 UTC m=+648.984994185" Dec 08 00:18:13 crc kubenswrapper[4745]: I1208 00:18:13.561279 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:14 crc kubenswrapper[4745]: I1208 00:18:14.478618 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:14 crc kubenswrapper[4745]: I1208 00:18:14.504406 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:18:18 crc kubenswrapper[4745]: I1208 00:18:18.883472 4745 scope.go:117] "RemoveContainer" containerID="84ff2184bdcecf225a8ccfd25d0b2e058a82360914b45b7dfc646209f975032f" Dec 08 00:18:18 crc kubenswrapper[4745]: E1208 00:18:18.884247 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-pk459_openshift-multus(73d47ce8-04b5-4dba-aa14-655581a103a8)\"" pod="openshift-multus/multus-pk459" podUID="73d47ce8-04b5-4dba-aa14-655581a103a8" Dec 08 00:18:33 crc kubenswrapper[4745]: I1208 00:18:33.882906 4745 scope.go:117] "RemoveContainer" containerID="84ff2184bdcecf225a8ccfd25d0b2e058a82360914b45b7dfc646209f975032f" Dec 08 00:18:34 crc kubenswrapper[4745]: I1208 00:18:34.618861 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pk459_73d47ce8-04b5-4dba-aa14-655581a103a8/kube-multus/2.log" Dec 08 00:18:34 crc kubenswrapper[4745]: I1208 00:18:34.619439 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pk459" event={"ID":"73d47ce8-04b5-4dba-aa14-655581a103a8","Type":"ContainerStarted","Data":"cd68534b2e7137db8c4256b3299dea421d34287892c85cbce48c6a65b34cd7ed"} Dec 08 00:18:36 crc kubenswrapper[4745]: I1208 00:18:36.186031 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-drwqf" Dec 08 00:19:15 crc kubenswrapper[4745]: I1208 00:19:15.629190 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-64bk6"] Dec 08 00:19:15 crc kubenswrapper[4745]: I1208 00:19:15.634354 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-64bk6" podUID="09f4c527-282b-4f8e-98ff-3e3032ab2896" containerName="registry-server" containerID="cri-o://3cc814b67b16f8e3acbf4d5f8014c3c387b6cd93cc42a5082762509be99b1753" gracePeriod=30 Dec 08 00:19:15 crc kubenswrapper[4745]: I1208 
00:19:15.866704 4745 generic.go:334] "Generic (PLEG): container finished" podID="09f4c527-282b-4f8e-98ff-3e3032ab2896" containerID="3cc814b67b16f8e3acbf4d5f8014c3c387b6cd93cc42a5082762509be99b1753" exitCode=0 Dec 08 00:19:15 crc kubenswrapper[4745]: I1208 00:19:15.866747 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-64bk6" event={"ID":"09f4c527-282b-4f8e-98ff-3e3032ab2896","Type":"ContainerDied","Data":"3cc814b67b16f8e3acbf4d5f8014c3c387b6cd93cc42a5082762509be99b1753"} Dec 08 00:19:15 crc kubenswrapper[4745]: I1208 00:19:15.966774 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:19:16 crc kubenswrapper[4745]: I1208 00:19:16.092417 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09f4c527-282b-4f8e-98ff-3e3032ab2896-catalog-content\") pod \"09f4c527-282b-4f8e-98ff-3e3032ab2896\" (UID: \"09f4c527-282b-4f8e-98ff-3e3032ab2896\") " Dec 08 00:19:16 crc kubenswrapper[4745]: I1208 00:19:16.092859 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2d9m\" (UniqueName: \"kubernetes.io/projected/09f4c527-282b-4f8e-98ff-3e3032ab2896-kube-api-access-f2d9m\") pod \"09f4c527-282b-4f8e-98ff-3e3032ab2896\" (UID: \"09f4c527-282b-4f8e-98ff-3e3032ab2896\") " Dec 08 00:19:16 crc kubenswrapper[4745]: I1208 00:19:16.093207 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09f4c527-282b-4f8e-98ff-3e3032ab2896-utilities\") pod \"09f4c527-282b-4f8e-98ff-3e3032ab2896\" (UID: \"09f4c527-282b-4f8e-98ff-3e3032ab2896\") " Dec 08 00:19:16 crc kubenswrapper[4745]: I1208 00:19:16.095499 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09f4c527-282b-4f8e-98ff-3e3032ab2896-utilities" (OuterVolumeSpecName: "utilities") pod "09f4c527-282b-4f8e-98ff-3e3032ab2896" (UID: "09f4c527-282b-4f8e-98ff-3e3032ab2896"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:19:16 crc kubenswrapper[4745]: I1208 00:19:16.100183 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09f4c527-282b-4f8e-98ff-3e3032ab2896-kube-api-access-f2d9m" (OuterVolumeSpecName: "kube-api-access-f2d9m") pod "09f4c527-282b-4f8e-98ff-3e3032ab2896" (UID: "09f4c527-282b-4f8e-98ff-3e3032ab2896"). InnerVolumeSpecName "kube-api-access-f2d9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:19:16 crc kubenswrapper[4745]: I1208 00:19:16.127824 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09f4c527-282b-4f8e-98ff-3e3032ab2896-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "09f4c527-282b-4f8e-98ff-3e3032ab2896" (UID: "09f4c527-282b-4f8e-98ff-3e3032ab2896"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:19:16 crc kubenswrapper[4745]: I1208 00:19:16.195585 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09f4c527-282b-4f8e-98ff-3e3032ab2896-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:19:16 crc kubenswrapper[4745]: I1208 00:19:16.195896 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09f4c527-282b-4f8e-98ff-3e3032ab2896-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:19:16 crc kubenswrapper[4745]: I1208 00:19:16.196194 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2d9m\" (UniqueName: \"kubernetes.io/projected/09f4c527-282b-4f8e-98ff-3e3032ab2896-kube-api-access-f2d9m\") on node \"crc\" DevicePath \"\"" Dec 08 00:19:16 crc kubenswrapper[4745]: I1208 00:19:16.874775 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-64bk6" event={"ID":"09f4c527-282b-4f8e-98ff-3e3032ab2896","Type":"ContainerDied","Data":"735ebe81e043a9be62c5850d84c9fcf5c73bad89714fd3973dd17f41104c5d0e"} Dec 08 00:19:16 crc kubenswrapper[4745]: I1208 00:19:16.874826 4745 scope.go:117] "RemoveContainer" containerID="3cc814b67b16f8e3acbf4d5f8014c3c387b6cd93cc42a5082762509be99b1753" Dec 08 00:19:16 crc kubenswrapper[4745]: I1208 00:19:16.876076 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-64bk6" Dec 08 00:19:16 crc kubenswrapper[4745]: I1208 00:19:16.892984 4745 scope.go:117] "RemoveContainer" containerID="9f81aa358b7f64592ca99b9f284fdd8fac13caf0a7aeb0a3d350f7dee76f73ea" Dec 08 00:19:16 crc kubenswrapper[4745]: I1208 00:19:16.905915 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-64bk6"] Dec 08 00:19:16 crc kubenswrapper[4745]: I1208 00:19:16.909056 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-64bk6"] Dec 08 00:19:16 crc kubenswrapper[4745]: I1208 00:19:16.927914 4745 scope.go:117] "RemoveContainer" containerID="496e81b8072ff225b0704ea7f0de54d1d32bb16589e7c909e70d918734f2fc61" Dec 08 00:19:18 crc kubenswrapper[4745]: I1208 00:19:18.890183 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09f4c527-282b-4f8e-98ff-3e3032ab2896" path="/var/lib/kubelet/pods/09f4c527-282b-4f8e-98ff-3e3032ab2896/volumes" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.127234 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s"] Dec 08 00:19:19 crc kubenswrapper[4745]: E1208 00:19:19.127516 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09f4c527-282b-4f8e-98ff-3e3032ab2896" containerName="registry-server" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.127532 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="09f4c527-282b-4f8e-98ff-3e3032ab2896" containerName="registry-server" Dec 08 00:19:19 crc kubenswrapper[4745]: E1208 00:19:19.127548 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09f4c527-282b-4f8e-98ff-3e3032ab2896" containerName="extract-utilities" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.127556 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="09f4c527-282b-4f8e-98ff-3e3032ab2896" containerName="extract-utilities" Dec 08 00:19:19 crc 
kubenswrapper[4745]: E1208 00:19:19.127567 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09f4c527-282b-4f8e-98ff-3e3032ab2896" containerName="extract-content" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.127575 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="09f4c527-282b-4f8e-98ff-3e3032ab2896" containerName="extract-content" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.127689 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="09f4c527-282b-4f8e-98ff-3e3032ab2896" containerName="registry-server" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.128716 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.131413 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.138600 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s"] Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.232811 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f2525385-23f5-47c8-aee0-d56f22c34f7e-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s\" (UID: \"f2525385-23f5-47c8-aee0-d56f22c34f7e\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.232886 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f2525385-23f5-47c8-aee0-d56f22c34f7e-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s\" (UID: \"f2525385-23f5-47c8-aee0-d56f22c34f7e\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.232955 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtlzh\" (UniqueName: \"kubernetes.io/projected/f2525385-23f5-47c8-aee0-d56f22c34f7e-kube-api-access-rtlzh\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s\" (UID: \"f2525385-23f5-47c8-aee0-d56f22c34f7e\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.334310 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f2525385-23f5-47c8-aee0-d56f22c34f7e-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s\" (UID: \"f2525385-23f5-47c8-aee0-d56f22c34f7e\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.334405 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtlzh\" (UniqueName: \"kubernetes.io/projected/f2525385-23f5-47c8-aee0-d56f22c34f7e-kube-api-access-rtlzh\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s\" (UID: \"f2525385-23f5-47c8-aee0-d56f22c34f7e\") " 
pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.334574 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f2525385-23f5-47c8-aee0-d56f22c34f7e-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s\" (UID: \"f2525385-23f5-47c8-aee0-d56f22c34f7e\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.335364 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f2525385-23f5-47c8-aee0-d56f22c34f7e-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s\" (UID: \"f2525385-23f5-47c8-aee0-d56f22c34f7e\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.335474 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f2525385-23f5-47c8-aee0-d56f22c34f7e-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s\" (UID: \"f2525385-23f5-47c8-aee0-d56f22c34f7e\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.362206 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtlzh\" (UniqueName: \"kubernetes.io/projected/f2525385-23f5-47c8-aee0-d56f22c34f7e-kube-api-access-rtlzh\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s\" (UID: \"f2525385-23f5-47c8-aee0-d56f22c34f7e\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.456757 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.704289 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s"] Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.897288 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" event={"ID":"f2525385-23f5-47c8-aee0-d56f22c34f7e","Type":"ContainerStarted","Data":"fa5560c41c2b5c057c0b24862f43da371f182b26c8bb55a39665f98a02d36cd4"} Dec 08 00:19:19 crc kubenswrapper[4745]: I1208 00:19:19.897350 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" event={"ID":"f2525385-23f5-47c8-aee0-d56f22c34f7e","Type":"ContainerStarted","Data":"9f1ac7413563993d05383005a7ff898204ec217ccfb671760ead15cac500a92b"} Dec 08 00:19:20 crc kubenswrapper[4745]: I1208 00:19:20.903720 4745 generic.go:334] "Generic (PLEG): container finished" podID="f2525385-23f5-47c8-aee0-d56f22c34f7e" containerID="fa5560c41c2b5c057c0b24862f43da371f182b26c8bb55a39665f98a02d36cd4" exitCode=0 Dec 08 00:19:20 crc kubenswrapper[4745]: I1208 00:19:20.903771 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" event={"ID":"f2525385-23f5-47c8-aee0-d56f22c34f7e","Type":"ContainerDied","Data":"fa5560c41c2b5c057c0b24862f43da371f182b26c8bb55a39665f98a02d36cd4"} Dec 08 00:19:20 crc kubenswrapper[4745]: I1208 00:19:20.909457 4745 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 00:19:21 crc kubenswrapper[4745]: I1208 00:19:21.913214 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" event={"ID":"f2525385-23f5-47c8-aee0-d56f22c34f7e","Type":"ContainerStarted","Data":"7170e51e91552c3be97059a58f6e44f521a277d8fb84e9d57b090132d15608c8"} Dec 08 00:19:22 crc kubenswrapper[4745]: I1208 00:19:22.461003 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:19:22 crc kubenswrapper[4745]: I1208 00:19:22.461424 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:19:22 crc kubenswrapper[4745]: I1208 00:19:22.919130 4745 generic.go:334] "Generic (PLEG): container finished" podID="f2525385-23f5-47c8-aee0-d56f22c34f7e" containerID="7170e51e91552c3be97059a58f6e44f521a277d8fb84e9d57b090132d15608c8" exitCode=0 Dec 08 00:19:22 crc kubenswrapper[4745]: I1208 00:19:22.919270 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" event={"ID":"f2525385-23f5-47c8-aee0-d56f22c34f7e","Type":"ContainerDied","Data":"7170e51e91552c3be97059a58f6e44f521a277d8fb84e9d57b090132d15608c8"} 
Dec 08 00:19:23 crc kubenswrapper[4745]: I1208 00:19:23.933650 4745 generic.go:334] "Generic (PLEG): container finished" podID="f2525385-23f5-47c8-aee0-d56f22c34f7e" containerID="5d57373779fe9b058b82946251dc5368559fff4d72a4966e4472a9770b0bf4d2" exitCode=0 Dec 08 00:19:23 crc kubenswrapper[4745]: I1208 00:19:23.933722 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" event={"ID":"f2525385-23f5-47c8-aee0-d56f22c34f7e","Type":"ContainerDied","Data":"5d57373779fe9b058b82946251dc5368559fff4d72a4966e4472a9770b0bf4d2"} Dec 08 00:19:25 crc kubenswrapper[4745]: I1208 00:19:25.179388 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" Dec 08 00:19:25 crc kubenswrapper[4745]: I1208 00:19:25.213684 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f2525385-23f5-47c8-aee0-d56f22c34f7e-util\") pod \"f2525385-23f5-47c8-aee0-d56f22c34f7e\" (UID: \"f2525385-23f5-47c8-aee0-d56f22c34f7e\") " Dec 08 00:19:25 crc kubenswrapper[4745]: I1208 00:19:25.213804 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtlzh\" (UniqueName: \"kubernetes.io/projected/f2525385-23f5-47c8-aee0-d56f22c34f7e-kube-api-access-rtlzh\") pod \"f2525385-23f5-47c8-aee0-d56f22c34f7e\" (UID: \"f2525385-23f5-47c8-aee0-d56f22c34f7e\") " Dec 08 00:19:25 crc kubenswrapper[4745]: I1208 00:19:25.213902 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f2525385-23f5-47c8-aee0-d56f22c34f7e-bundle\") pod \"f2525385-23f5-47c8-aee0-d56f22c34f7e\" (UID: \"f2525385-23f5-47c8-aee0-d56f22c34f7e\") " Dec 08 00:19:25 crc kubenswrapper[4745]: I1208 00:19:25.218478 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2525385-23f5-47c8-aee0-d56f22c34f7e-bundle" (OuterVolumeSpecName: "bundle") pod "f2525385-23f5-47c8-aee0-d56f22c34f7e" (UID: "f2525385-23f5-47c8-aee0-d56f22c34f7e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:19:25 crc kubenswrapper[4745]: I1208 00:19:25.220540 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2525385-23f5-47c8-aee0-d56f22c34f7e-kube-api-access-rtlzh" (OuterVolumeSpecName: "kube-api-access-rtlzh") pod "f2525385-23f5-47c8-aee0-d56f22c34f7e" (UID: "f2525385-23f5-47c8-aee0-d56f22c34f7e"). InnerVolumeSpecName "kube-api-access-rtlzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:19:25 crc kubenswrapper[4745]: I1208 00:19:25.245894 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2525385-23f5-47c8-aee0-d56f22c34f7e-util" (OuterVolumeSpecName: "util") pod "f2525385-23f5-47c8-aee0-d56f22c34f7e" (UID: "f2525385-23f5-47c8-aee0-d56f22c34f7e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:19:25 crc kubenswrapper[4745]: I1208 00:19:25.315180 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtlzh\" (UniqueName: \"kubernetes.io/projected/f2525385-23f5-47c8-aee0-d56f22c34f7e-kube-api-access-rtlzh\") on node \"crc\" DevicePath \"\"" Dec 08 00:19:25 crc kubenswrapper[4745]: I1208 00:19:25.315219 4745 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f2525385-23f5-47c8-aee0-d56f22c34f7e-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 00:19:25 crc kubenswrapper[4745]: I1208 00:19:25.315231 4745 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f2525385-23f5-47c8-aee0-d56f22c34f7e-util\") on node \"crc\" DevicePath \"\"" Dec 08 00:19:25 crc kubenswrapper[4745]: I1208 00:19:25.946123 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" event={"ID":"f2525385-23f5-47c8-aee0-d56f22c34f7e","Type":"ContainerDied","Data":"9f1ac7413563993d05383005a7ff898204ec217ccfb671760ead15cac500a92b"} Dec 08 00:19:25 crc kubenswrapper[4745]: I1208 00:19:25.946167 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s" Dec 08 00:19:25 crc kubenswrapper[4745]: I1208 00:19:25.946184 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f1ac7413563993d05383005a7ff898204ec217ccfb671760ead15cac500a92b" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.534211 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46"] Dec 08 00:19:26 crc kubenswrapper[4745]: E1208 00:19:26.534854 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2525385-23f5-47c8-aee0-d56f22c34f7e" containerName="extract" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.534876 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2525385-23f5-47c8-aee0-d56f22c34f7e" containerName="extract" Dec 08 00:19:26 crc kubenswrapper[4745]: E1208 00:19:26.534910 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2525385-23f5-47c8-aee0-d56f22c34f7e" containerName="pull" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.534950 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2525385-23f5-47c8-aee0-d56f22c34f7e" containerName="pull" Dec 08 00:19:26 crc kubenswrapper[4745]: E1208 00:19:26.534971 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2525385-23f5-47c8-aee0-d56f22c34f7e" containerName="util" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.534984 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2525385-23f5-47c8-aee0-d56f22c34f7e" containerName="util" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.535153 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2525385-23f5-47c8-aee0-d56f22c34f7e" containerName="extract" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.536379 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.546115 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.560852 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46"] Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.646370 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46\" (UID: \"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.646449 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6xqn\" (UniqueName: \"kubernetes.io/projected/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-kube-api-access-b6xqn\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46\" (UID: \"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.646482 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46\" (UID: \"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.747333 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46\" (UID: \"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.747820 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6xqn\" (UniqueName: \"kubernetes.io/projected/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-kube-api-access-b6xqn\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46\" (UID: \"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.748163 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46\" (UID: \"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.747759 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46\" (UID: \"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.748613 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46\" (UID: \"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.767747 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6xqn\" (UniqueName: \"kubernetes.io/projected/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-kube-api-access-b6xqn\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46\" (UID: \"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" Dec 08 00:19:26 crc kubenswrapper[4745]: I1208 00:19:26.883051 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.336809 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl"] Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.339481 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.356749 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl"] Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.358674 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl\" (UID: \"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.358793 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl\" (UID: \"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.359006 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbwk6\" (UniqueName: \"kubernetes.io/projected/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-kube-api-access-hbwk6\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl\" (UID: \"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" Dec 08 00:19:27 
crc kubenswrapper[4745]: W1208 00:19:27.391714 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb550ae0_c84f_4ddc_a717_c54f7cbf63bd.slice/crio-e38472e793f793c0692b0e603c99b5181f49930d8823e4e293e9f4135870f15f WatchSource:0}: Error finding container e38472e793f793c0692b0e603c99b5181f49930d8823e4e293e9f4135870f15f: Status 404 returned error can't find the container with id e38472e793f793c0692b0e603c99b5181f49930d8823e4e293e9f4135870f15f Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.394660 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46"] Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.459585 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbwk6\" (UniqueName: \"kubernetes.io/projected/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-kube-api-access-hbwk6\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl\" (UID: \"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.459650 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl\" (UID: \"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.459698 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl\" (UID: \"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.460101 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl\" (UID: \"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.460214 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl\" (UID: \"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.476857 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbwk6\" (UniqueName: \"kubernetes.io/projected/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-kube-api-access-hbwk6\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl\" (UID: \"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.682988 
4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.949751 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl"] Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.961145 4745 generic.go:334] "Generic (PLEG): container finished" podID="eb550ae0-c84f-4ddc-a717-c54f7cbf63bd" containerID="fa96f6153b9ed36240b8ed4bdf3443aa2fba4095c872763b1a0695a4b22c2961" exitCode=0 Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.961206 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" event={"ID":"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd","Type":"ContainerDied","Data":"fa96f6153b9ed36240b8ed4bdf3443aa2fba4095c872763b1a0695a4b22c2961"} Dec 08 00:19:27 crc kubenswrapper[4745]: I1208 00:19:27.961246 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" event={"ID":"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd","Type":"ContainerStarted","Data":"e38472e793f793c0692b0e603c99b5181f49930d8823e4e293e9f4135870f15f"} Dec 08 00:19:27 crc kubenswrapper[4745]: W1208 00:19:27.961952 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1fd8e5bb_a3a3_4a3f_a26b_6768856dc30a.slice/crio-280bda2b2483dceb925820f2fb533e47ded173decf0b2712e6d65fee03b05696 WatchSource:0}: Error finding container 280bda2b2483dceb925820f2fb533e47ded173decf0b2712e6d65fee03b05696: Status 404 returned error can't find the container with id 280bda2b2483dceb925820f2fb533e47ded173decf0b2712e6d65fee03b05696 Dec 08 00:19:28 crc kubenswrapper[4745]: I1208 00:19:28.968109 4745 generic.go:334] "Generic (PLEG): container finished" podID="1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a" containerID="ea7f2b15e1272889da9498290f2c6fc2a84b234692dc28c653650778e6601649" exitCode=0 Dec 08 00:19:28 crc kubenswrapper[4745]: I1208 00:19:28.968177 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" event={"ID":"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a","Type":"ContainerDied","Data":"ea7f2b15e1272889da9498290f2c6fc2a84b234692dc28c653650778e6601649"} Dec 08 00:19:28 crc kubenswrapper[4745]: I1208 00:19:28.968215 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" event={"ID":"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a","Type":"ContainerStarted","Data":"280bda2b2483dceb925820f2fb533e47ded173decf0b2712e6d65fee03b05696"} Dec 08 00:19:29 crc kubenswrapper[4745]: I1208 00:19:29.984500 4745 generic.go:334] "Generic (PLEG): container finished" podID="eb550ae0-c84f-4ddc-a717-c54f7cbf63bd" containerID="b577ab76fb641c025ccfa9faafbce91c1d970fd97a4119ce60ac71cd72ad664d" exitCode=0 Dec 08 00:19:29 crc kubenswrapper[4745]: I1208 00:19:29.984549 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" event={"ID":"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd","Type":"ContainerDied","Data":"b577ab76fb641c025ccfa9faafbce91c1d970fd97a4119ce60ac71cd72ad664d"} Dec 08 00:19:30 crc kubenswrapper[4745]: I1208 00:19:30.991464 4745 
generic.go:334] "Generic (PLEG): container finished" podID="eb550ae0-c84f-4ddc-a717-c54f7cbf63bd" containerID="e6930592230170584c262c39fcb667bb81970af2444154dfc647f4ce99bb85ea" exitCode=0 Dec 08 00:19:30 crc kubenswrapper[4745]: I1208 00:19:30.991591 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" event={"ID":"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd","Type":"ContainerDied","Data":"e6930592230170584c262c39fcb667bb81970af2444154dfc647f4ce99bb85ea"} Dec 08 00:19:30 crc kubenswrapper[4745]: I1208 00:19:30.993650 4745 generic.go:334] "Generic (PLEG): container finished" podID="1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a" containerID="1b1ab5f9d76ca09c45f51e8b90b705a75d4799071fa8486588e147e48f9f86e6" exitCode=0 Dec 08 00:19:30 crc kubenswrapper[4745]: I1208 00:19:30.993700 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" event={"ID":"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a","Type":"ContainerDied","Data":"1b1ab5f9d76ca09c45f51e8b90b705a75d4799071fa8486588e147e48f9f86e6"} Dec 08 00:19:31 crc kubenswrapper[4745]: I1208 00:19:31.999305 4745 generic.go:334] "Generic (PLEG): container finished" podID="1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a" containerID="0a8e0c6c8d98c2d1d6816028e270c3769864ab62ea4583ee53d1840f8ded1a97" exitCode=0 Dec 08 00:19:32 crc kubenswrapper[4745]: I1208 00:19:31.999434 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" event={"ID":"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a","Type":"ContainerDied","Data":"0a8e0c6c8d98c2d1d6816028e270c3769864ab62ea4583ee53d1840f8ded1a97"} Dec 08 00:19:32 crc kubenswrapper[4745]: I1208 00:19:32.236982 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" Dec 08 00:19:32 crc kubenswrapper[4745]: I1208 00:19:32.325838 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-util\") pod \"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd\" (UID: \"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd\") " Dec 08 00:19:32 crc kubenswrapper[4745]: I1208 00:19:32.325909 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6xqn\" (UniqueName: \"kubernetes.io/projected/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-kube-api-access-b6xqn\") pod \"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd\" (UID: \"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd\") " Dec 08 00:19:32 crc kubenswrapper[4745]: I1208 00:19:32.325979 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-bundle\") pod \"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd\" (UID: \"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd\") " Dec 08 00:19:32 crc kubenswrapper[4745]: I1208 00:19:32.326541 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-bundle" (OuterVolumeSpecName: "bundle") pod "eb550ae0-c84f-4ddc-a717-c54f7cbf63bd" (UID: "eb550ae0-c84f-4ddc-a717-c54f7cbf63bd"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:19:32 crc kubenswrapper[4745]: I1208 00:19:32.331647 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-kube-api-access-b6xqn" (OuterVolumeSpecName: "kube-api-access-b6xqn") pod "eb550ae0-c84f-4ddc-a717-c54f7cbf63bd" (UID: "eb550ae0-c84f-4ddc-a717-c54f7cbf63bd"). InnerVolumeSpecName "kube-api-access-b6xqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:19:32 crc kubenswrapper[4745]: I1208 00:19:32.346009 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-util" (OuterVolumeSpecName: "util") pod "eb550ae0-c84f-4ddc-a717-c54f7cbf63bd" (UID: "eb550ae0-c84f-4ddc-a717-c54f7cbf63bd"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:19:32 crc kubenswrapper[4745]: I1208 00:19:32.427399 4745 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-util\") on node \"crc\" DevicePath \"\"" Dec 08 00:19:32 crc kubenswrapper[4745]: I1208 00:19:32.427431 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6xqn\" (UniqueName: \"kubernetes.io/projected/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-kube-api-access-b6xqn\") on node \"crc\" DevicePath \"\"" Dec 08 00:19:32 crc kubenswrapper[4745]: I1208 00:19:32.427444 4745 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eb550ae0-c84f-4ddc-a717-c54f7cbf63bd-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 00:19:33 crc kubenswrapper[4745]: I1208 00:19:33.005347 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" event={"ID":"eb550ae0-c84f-4ddc-a717-c54f7cbf63bd","Type":"ContainerDied","Data":"e38472e793f793c0692b0e603c99b5181f49930d8823e4e293e9f4135870f15f"} Dec 08 00:19:33 crc kubenswrapper[4745]: I1208 00:19:33.005378 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46" Dec 08 00:19:33 crc kubenswrapper[4745]: I1208 00:19:33.005390 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e38472e793f793c0692b0e603c99b5181f49930d8823e4e293e9f4135870f15f" Dec 08 00:19:33 crc kubenswrapper[4745]: I1208 00:19:33.284660 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" Dec 08 00:19:33 crc kubenswrapper[4745]: I1208 00:19:33.336466 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-bundle\") pod \"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a\" (UID: \"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a\") " Dec 08 00:19:33 crc kubenswrapper[4745]: I1208 00:19:33.337394 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-bundle" (OuterVolumeSpecName: "bundle") pod "1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a" (UID: "1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:19:33 crc kubenswrapper[4745]: I1208 00:19:33.337527 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbwk6\" (UniqueName: \"kubernetes.io/projected/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-kube-api-access-hbwk6\") pod \"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a\" (UID: \"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a\") " Dec 08 00:19:33 crc kubenswrapper[4745]: I1208 00:19:33.338019 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-util\") pod \"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a\" (UID: \"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a\") " Dec 08 00:19:33 crc kubenswrapper[4745]: I1208 00:19:33.338228 4745 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 00:19:33 crc kubenswrapper[4745]: I1208 00:19:33.343064 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-kube-api-access-hbwk6" (OuterVolumeSpecName: "kube-api-access-hbwk6") pod "1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a" (UID: "1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a"). InnerVolumeSpecName "kube-api-access-hbwk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:19:33 crc kubenswrapper[4745]: I1208 00:19:33.352561 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-util" (OuterVolumeSpecName: "util") pod "1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a" (UID: "1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:19:33 crc kubenswrapper[4745]: I1208 00:19:33.439657 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbwk6\" (UniqueName: \"kubernetes.io/projected/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-kube-api-access-hbwk6\") on node \"crc\" DevicePath \"\"" Dec 08 00:19:33 crc kubenswrapper[4745]: I1208 00:19:33.439697 4745 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a-util\") on node \"crc\" DevicePath \"\"" Dec 08 00:19:34 crc kubenswrapper[4745]: I1208 00:19:34.011396 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" event={"ID":"1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a","Type":"ContainerDied","Data":"280bda2b2483dceb925820f2fb533e47ded173decf0b2712e6d65fee03b05696"} Dec 08 00:19:34 crc kubenswrapper[4745]: I1208 00:19:34.011766 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="280bda2b2483dceb925820f2fb533e47ded173decf0b2712e6d65fee03b05696" Dec 08 00:19:34 crc kubenswrapper[4745]: I1208 00:19:34.011462 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.294132 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6"] Dec 08 00:19:35 crc kubenswrapper[4745]: E1208 00:19:35.294317 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb550ae0-c84f-4ddc-a717-c54f7cbf63bd" containerName="extract" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.294331 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb550ae0-c84f-4ddc-a717-c54f7cbf63bd" containerName="extract" Dec 08 00:19:35 crc kubenswrapper[4745]: E1208 00:19:35.294344 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb550ae0-c84f-4ddc-a717-c54f7cbf63bd" containerName="pull" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.294350 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb550ae0-c84f-4ddc-a717-c54f7cbf63bd" containerName="pull" Dec 08 00:19:35 crc kubenswrapper[4745]: E1208 00:19:35.294360 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a" containerName="util" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.294367 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a" containerName="util" Dec 08 00:19:35 crc kubenswrapper[4745]: E1208 00:19:35.294377 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb550ae0-c84f-4ddc-a717-c54f7cbf63bd" containerName="util" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.294385 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb550ae0-c84f-4ddc-a717-c54f7cbf63bd" containerName="util" Dec 08 00:19:35 crc kubenswrapper[4745]: E1208 00:19:35.294394 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a" containerName="pull" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.294401 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a" containerName="pull" Dec 08 00:19:35 crc kubenswrapper[4745]: E1208 00:19:35.294411 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a" containerName="extract" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.294417 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a" containerName="extract" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.294504 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a" containerName="extract" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.294516 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb550ae0-c84f-4ddc-a717-c54f7cbf63bd" containerName="extract" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.295316 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.296985 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.311047 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6"] Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.357767 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6\" (UID: \"02dc9d5d-28eb-4c8e-af70-445fc2a8214e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.357838 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9gk8\" (UniqueName: \"kubernetes.io/projected/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-kube-api-access-d9gk8\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6\" (UID: \"02dc9d5d-28eb-4c8e-af70-445fc2a8214e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.357889 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6\" (UID: \"02dc9d5d-28eb-4c8e-af70-445fc2a8214e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.458818 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6\" (UID: \"02dc9d5d-28eb-4c8e-af70-445fc2a8214e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.458881 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9gk8\" (UniqueName: \"kubernetes.io/projected/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-kube-api-access-d9gk8\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6\" (UID: \"02dc9d5d-28eb-4c8e-af70-445fc2a8214e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.458921 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6\" (UID: \"02dc9d5d-28eb-4c8e-af70-445fc2a8214e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.459378 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6\" (UID: \"02dc9d5d-28eb-4c8e-af70-445fc2a8214e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.459517 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6\" (UID: \"02dc9d5d-28eb-4c8e-af70-445fc2a8214e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.475683 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9gk8\" (UniqueName: \"kubernetes.io/projected/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-kube-api-access-d9gk8\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6\" (UID: \"02dc9d5d-28eb-4c8e-af70-445fc2a8214e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.650474 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" Dec 08 00:19:35 crc kubenswrapper[4745]: I1208 00:19:35.995833 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6"] Dec 08 00:19:36 crc kubenswrapper[4745]: W1208 00:19:36.004532 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02dc9d5d_28eb_4c8e_af70_445fc2a8214e.slice/crio-c464382c4b9549cd9209b0393508769fc572fa3d7e815f429e2d2b92b6f5d6d2 WatchSource:0}: Error finding container c464382c4b9549cd9209b0393508769fc572fa3d7e815f429e2d2b92b6f5d6d2: Status 404 returned error can't find the container with id c464382c4b9549cd9209b0393508769fc572fa3d7e815f429e2d2b92b6f5d6d2 Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.023855 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" event={"ID":"02dc9d5d-28eb-4c8e-af70-445fc2a8214e","Type":"ContainerStarted","Data":"c464382c4b9549cd9209b0393508769fc572fa3d7e815f429e2d2b92b6f5d6d2"} Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.759529 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-lrvkr"] Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.760642 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-lrvkr" Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.762414 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.762656 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.762811 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-8krwf" Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.775147 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-lrvkr"] Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.871816 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6"] Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.872451 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6" Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.873757 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-wcldl" Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.875717 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrtdc\" (UniqueName: \"kubernetes.io/projected/a2fa94d7-daa5-4465-a657-7d48ec101d98-kube-api-access-zrtdc\") pod \"obo-prometheus-operator-668cf9dfbb-lrvkr\" (UID: \"a2fa94d7-daa5-4465-a657-7d48ec101d98\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-lrvkr" Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.877606 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.880820 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2"] Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.881634 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2" Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.891029 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6"] Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.900737 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2"] Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.977386 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dc180ed4-bc7e-4a33-ba55-def51f0edd4e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6\" (UID: \"dc180ed4-bc7e-4a33-ba55-def51f0edd4e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6" Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.977636 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dc180ed4-bc7e-4a33-ba55-def51f0edd4e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6\" (UID: \"dc180ed4-bc7e-4a33-ba55-def51f0edd4e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6" Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.977890 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrtdc\" (UniqueName: \"kubernetes.io/projected/a2fa94d7-daa5-4465-a657-7d48ec101d98-kube-api-access-zrtdc\") pod \"obo-prometheus-operator-668cf9dfbb-lrvkr\" (UID: \"a2fa94d7-daa5-4465-a657-7d48ec101d98\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-lrvkr" Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.979821 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-bl7n7"] Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.980682 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-bl7n7" Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.982787 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-jwlrm" Dec 08 00:19:36 crc kubenswrapper[4745]: I1208 00:19:36.983060 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.002352 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-bl7n7"] Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.008294 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrtdc\" (UniqueName: \"kubernetes.io/projected/a2fa94d7-daa5-4465-a657-7d48ec101d98-kube-api-access-zrtdc\") pod \"obo-prometheus-operator-668cf9dfbb-lrvkr\" (UID: \"a2fa94d7-daa5-4465-a657-7d48ec101d98\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-lrvkr" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.032022 4745 generic.go:334] "Generic (PLEG): container finished" podID="02dc9d5d-28eb-4c8e-af70-445fc2a8214e" containerID="d9f8f18544e17e51fdc91f89cff08675d223cf0cb638954092e2cabd5f02ec33" exitCode=0 Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.032054 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" event={"ID":"02dc9d5d-28eb-4c8e-af70-445fc2a8214e","Type":"ContainerDied","Data":"d9f8f18544e17e51fdc91f89cff08675d223cf0cb638954092e2cabd5f02ec33"} Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.073793 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-lrvkr" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.093268 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/66dacf9f-e094-4d4a-ab15-c8d2da21d334-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2\" (UID: \"66dacf9f-e094-4d4a-ab15-c8d2da21d334\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.093518 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dc180ed4-bc7e-4a33-ba55-def51f0edd4e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6\" (UID: \"dc180ed4-bc7e-4a33-ba55-def51f0edd4e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.093554 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/66dacf9f-e094-4d4a-ab15-c8d2da21d334-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2\" (UID: \"66dacf9f-e094-4d4a-ab15-c8d2da21d334\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.093594 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dc180ed4-bc7e-4a33-ba55-def51f0edd4e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6\" (UID: \"dc180ed4-bc7e-4a33-ba55-def51f0edd4e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.098620 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/dc180ed4-bc7e-4a33-ba55-def51f0edd4e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6\" (UID: \"dc180ed4-bc7e-4a33-ba55-def51f0edd4e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.101464 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/dc180ed4-bc7e-4a33-ba55-def51f0edd4e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6\" (UID: \"dc180ed4-bc7e-4a33-ba55-def51f0edd4e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.183444 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-drjd7"] Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.184622 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-drjd7" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.187390 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-f294c" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.189402 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.195466 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kh47v\" (UniqueName: \"kubernetes.io/projected/ee524a4b-6873-4801-af1a-955b4c28dd27-kube-api-access-kh47v\") pod \"observability-operator-d8bb48f5d-bl7n7\" (UID: \"ee524a4b-6873-4801-af1a-955b4c28dd27\") " pod="openshift-operators/observability-operator-d8bb48f5d-bl7n7" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.195511 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/ee524a4b-6873-4801-af1a-955b4c28dd27-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-bl7n7\" (UID: \"ee524a4b-6873-4801-af1a-955b4c28dd27\") " pod="openshift-operators/observability-operator-d8bb48f5d-bl7n7" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.195575 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hfxn\" (UniqueName: \"kubernetes.io/projected/367235e5-1c6a-42e1-b7fa-39dd81931cb9-kube-api-access-4hfxn\") pod \"perses-operator-5446b9c989-drjd7\" (UID: \"367235e5-1c6a-42e1-b7fa-39dd81931cb9\") " pod="openshift-operators/perses-operator-5446b9c989-drjd7" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.195605 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/66dacf9f-e094-4d4a-ab15-c8d2da21d334-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2\" (UID: \"66dacf9f-e094-4d4a-ab15-c8d2da21d334\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.196157 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/367235e5-1c6a-42e1-b7fa-39dd81931cb9-openshift-service-ca\") pod \"perses-operator-5446b9c989-drjd7\" (UID: \"367235e5-1c6a-42e1-b7fa-39dd81931cb9\") " pod="openshift-operators/perses-operator-5446b9c989-drjd7" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.196188 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/66dacf9f-e094-4d4a-ab15-c8d2da21d334-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2\" (UID: \"66dacf9f-e094-4d4a-ab15-c8d2da21d334\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.196701 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-drjd7"] Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.198877 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/66dacf9f-e094-4d4a-ab15-c8d2da21d334-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2\" (UID: \"66dacf9f-e094-4d4a-ab15-c8d2da21d334\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.200995 4745 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/66dacf9f-e094-4d4a-ab15-c8d2da21d334-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2\" (UID: \"66dacf9f-e094-4d4a-ab15-c8d2da21d334\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.201297 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.296749 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hfxn\" (UniqueName: \"kubernetes.io/projected/367235e5-1c6a-42e1-b7fa-39dd81931cb9-kube-api-access-4hfxn\") pod \"perses-operator-5446b9c989-drjd7\" (UID: \"367235e5-1c6a-42e1-b7fa-39dd81931cb9\") " pod="openshift-operators/perses-operator-5446b9c989-drjd7" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.296784 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/367235e5-1c6a-42e1-b7fa-39dd81931cb9-openshift-service-ca\") pod \"perses-operator-5446b9c989-drjd7\" (UID: \"367235e5-1c6a-42e1-b7fa-39dd81931cb9\") " pod="openshift-operators/perses-operator-5446b9c989-drjd7" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.296841 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kh47v\" (UniqueName: \"kubernetes.io/projected/ee524a4b-6873-4801-af1a-955b4c28dd27-kube-api-access-kh47v\") pod \"observability-operator-d8bb48f5d-bl7n7\" (UID: \"ee524a4b-6873-4801-af1a-955b4c28dd27\") " pod="openshift-operators/observability-operator-d8bb48f5d-bl7n7" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.296871 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/ee524a4b-6873-4801-af1a-955b4c28dd27-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-bl7n7\" (UID: \"ee524a4b-6873-4801-af1a-955b4c28dd27\") " pod="openshift-operators/observability-operator-d8bb48f5d-bl7n7" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.300738 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/367235e5-1c6a-42e1-b7fa-39dd81931cb9-openshift-service-ca\") pod \"perses-operator-5446b9c989-drjd7\" (UID: \"367235e5-1c6a-42e1-b7fa-39dd81931cb9\") " pod="openshift-operators/perses-operator-5446b9c989-drjd7" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.304437 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/ee524a4b-6873-4801-af1a-955b4c28dd27-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-bl7n7\" (UID: \"ee524a4b-6873-4801-af1a-955b4c28dd27\") " pod="openshift-operators/observability-operator-d8bb48f5d-bl7n7" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.341550 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hfxn\" (UniqueName: \"kubernetes.io/projected/367235e5-1c6a-42e1-b7fa-39dd81931cb9-kube-api-access-4hfxn\") pod \"perses-operator-5446b9c989-drjd7\" (UID: \"367235e5-1c6a-42e1-b7fa-39dd81931cb9\") " pod="openshift-operators/perses-operator-5446b9c989-drjd7" Dec 08 00:19:37 crc 
kubenswrapper[4745]: I1208 00:19:37.365595 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kh47v\" (UniqueName: \"kubernetes.io/projected/ee524a4b-6873-4801-af1a-955b4c28dd27-kube-api-access-kh47v\") pod \"observability-operator-d8bb48f5d-bl7n7\" (UID: \"ee524a4b-6873-4801-af1a-955b4c28dd27\") " pod="openshift-operators/observability-operator-d8bb48f5d-bl7n7" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.548872 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-drjd7" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.597335 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-bl7n7" Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.625114 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-lrvkr"] Dec 08 00:19:37 crc kubenswrapper[4745]: W1208 00:19:37.765646 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda2fa94d7_daa5_4465_a657_7d48ec101d98.slice/crio-5dfdc12c03837ea58fc86ecfa6acc0acc4b7a112ddf7f33e53acc6697300e89c WatchSource:0}: Error finding container 5dfdc12c03837ea58fc86ecfa6acc0acc4b7a112ddf7f33e53acc6697300e89c: Status 404 returned error can't find the container with id 5dfdc12c03837ea58fc86ecfa6acc0acc4b7a112ddf7f33e53acc6697300e89c Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.797593 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6"] Dec 08 00:19:37 crc kubenswrapper[4745]: I1208 00:19:37.811727 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2"] Dec 08 00:19:37 crc kubenswrapper[4745]: W1208 00:19:37.825558 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc180ed4_bc7e_4a33_ba55_def51f0edd4e.slice/crio-b952cef1bcf1715cb57c08df119df856386cbe09d1f8542f7e544cd2bea13d57 WatchSource:0}: Error finding container b952cef1bcf1715cb57c08df119df856386cbe09d1f8542f7e544cd2bea13d57: Status 404 returned error can't find the container with id b952cef1bcf1715cb57c08df119df856386cbe09d1f8542f7e544cd2bea13d57 Dec 08 00:19:38 crc kubenswrapper[4745]: I1208 00:19:38.045851 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6" event={"ID":"dc180ed4-bc7e-4a33-ba55-def51f0edd4e","Type":"ContainerStarted","Data":"b952cef1bcf1715cb57c08df119df856386cbe09d1f8542f7e544cd2bea13d57"} Dec 08 00:19:38 crc kubenswrapper[4745]: I1208 00:19:38.047156 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-lrvkr" event={"ID":"a2fa94d7-daa5-4465-a657-7d48ec101d98","Type":"ContainerStarted","Data":"5dfdc12c03837ea58fc86ecfa6acc0acc4b7a112ddf7f33e53acc6697300e89c"} Dec 08 00:19:38 crc kubenswrapper[4745]: I1208 00:19:38.048963 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2" event={"ID":"66dacf9f-e094-4d4a-ab15-c8d2da21d334","Type":"ContainerStarted","Data":"ae9528880806cdd9caf6872f0ae8e1a0e858eaf8ba6bb47767d8e0a7bb99dc4e"} Dec 08 00:19:38 crc 
kubenswrapper[4745]: I1208 00:19:38.138478 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-bl7n7"] Dec 08 00:19:38 crc kubenswrapper[4745]: I1208 00:19:38.178444 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-drjd7"] Dec 08 00:19:39 crc kubenswrapper[4745]: I1208 00:19:39.057713 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-bl7n7" event={"ID":"ee524a4b-6873-4801-af1a-955b4c28dd27","Type":"ContainerStarted","Data":"9a69de2948323f46984c0de97ac5e49560e65843c40621260f576b1639df611e"} Dec 08 00:19:39 crc kubenswrapper[4745]: I1208 00:19:39.064919 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-drjd7" event={"ID":"367235e5-1c6a-42e1-b7fa-39dd81931cb9","Type":"ContainerStarted","Data":"98f7d3c1c6d42b0729868aca5f22ad47e4ce89af43af8851a6fb56bad383714a"} Dec 08 00:19:42 crc kubenswrapper[4745]: I1208 00:19:42.719585 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elastic-operator-766bf8575-sbkqw"] Dec 08 00:19:42 crc kubenswrapper[4745]: I1208 00:19:42.720749 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elastic-operator-766bf8575-sbkqw" Dec 08 00:19:42 crc kubenswrapper[4745]: I1208 00:19:42.724462 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-service-cert" Dec 08 00:19:42 crc kubenswrapper[4745]: I1208 00:19:42.724503 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-dockercfg-99pq9" Dec 08 00:19:42 crc kubenswrapper[4745]: I1208 00:19:42.724543 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"openshift-service-ca.crt" Dec 08 00:19:42 crc kubenswrapper[4745]: I1208 00:19:42.724673 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"kube-root-ca.crt" Dec 08 00:19:42 crc kubenswrapper[4745]: I1208 00:19:42.734660 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-766bf8575-sbkqw"] Dec 08 00:19:42 crc kubenswrapper[4745]: I1208 00:19:42.855003 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d387d453-54c8-4288-a66b-e6e6ea3ab09c-webhook-cert\") pod \"elastic-operator-766bf8575-sbkqw\" (UID: \"d387d453-54c8-4288-a66b-e6e6ea3ab09c\") " pod="service-telemetry/elastic-operator-766bf8575-sbkqw" Dec 08 00:19:42 crc kubenswrapper[4745]: I1208 00:19:42.855609 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slhmc\" (UniqueName: \"kubernetes.io/projected/d387d453-54c8-4288-a66b-e6e6ea3ab09c-kube-api-access-slhmc\") pod \"elastic-operator-766bf8575-sbkqw\" (UID: \"d387d453-54c8-4288-a66b-e6e6ea3ab09c\") " pod="service-telemetry/elastic-operator-766bf8575-sbkqw" Dec 08 00:19:42 crc kubenswrapper[4745]: I1208 00:19:42.856461 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d387d453-54c8-4288-a66b-e6e6ea3ab09c-apiservice-cert\") pod \"elastic-operator-766bf8575-sbkqw\" (UID: \"d387d453-54c8-4288-a66b-e6e6ea3ab09c\") " pod="service-telemetry/elastic-operator-766bf8575-sbkqw" Dec 08 
00:19:42 crc kubenswrapper[4745]: I1208 00:19:42.957734 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d387d453-54c8-4288-a66b-e6e6ea3ab09c-webhook-cert\") pod \"elastic-operator-766bf8575-sbkqw\" (UID: \"d387d453-54c8-4288-a66b-e6e6ea3ab09c\") " pod="service-telemetry/elastic-operator-766bf8575-sbkqw" Dec 08 00:19:42 crc kubenswrapper[4745]: I1208 00:19:42.957806 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slhmc\" (UniqueName: \"kubernetes.io/projected/d387d453-54c8-4288-a66b-e6e6ea3ab09c-kube-api-access-slhmc\") pod \"elastic-operator-766bf8575-sbkqw\" (UID: \"d387d453-54c8-4288-a66b-e6e6ea3ab09c\") " pod="service-telemetry/elastic-operator-766bf8575-sbkqw" Dec 08 00:19:42 crc kubenswrapper[4745]: I1208 00:19:42.957834 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d387d453-54c8-4288-a66b-e6e6ea3ab09c-apiservice-cert\") pod \"elastic-operator-766bf8575-sbkqw\" (UID: \"d387d453-54c8-4288-a66b-e6e6ea3ab09c\") " pod="service-telemetry/elastic-operator-766bf8575-sbkqw" Dec 08 00:19:42 crc kubenswrapper[4745]: I1208 00:19:42.963484 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d387d453-54c8-4288-a66b-e6e6ea3ab09c-apiservice-cert\") pod \"elastic-operator-766bf8575-sbkqw\" (UID: \"d387d453-54c8-4288-a66b-e6e6ea3ab09c\") " pod="service-telemetry/elastic-operator-766bf8575-sbkqw" Dec 08 00:19:42 crc kubenswrapper[4745]: I1208 00:19:42.964491 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d387d453-54c8-4288-a66b-e6e6ea3ab09c-webhook-cert\") pod \"elastic-operator-766bf8575-sbkqw\" (UID: \"d387d453-54c8-4288-a66b-e6e6ea3ab09c\") " pod="service-telemetry/elastic-operator-766bf8575-sbkqw" Dec 08 00:19:43 crc kubenswrapper[4745]: I1208 00:19:43.053620 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slhmc\" (UniqueName: \"kubernetes.io/projected/d387d453-54c8-4288-a66b-e6e6ea3ab09c-kube-api-access-slhmc\") pod \"elastic-operator-766bf8575-sbkqw\" (UID: \"d387d453-54c8-4288-a66b-e6e6ea3ab09c\") " pod="service-telemetry/elastic-operator-766bf8575-sbkqw" Dec 08 00:19:43 crc kubenswrapper[4745]: I1208 00:19:43.347286 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elastic-operator-766bf8575-sbkqw" Dec 08 00:19:45 crc kubenswrapper[4745]: I1208 00:19:45.947624 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-b22m4"] Dec 08 00:19:45 crc kubenswrapper[4745]: I1208 00:19:45.948827 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-b22m4" Dec 08 00:19:45 crc kubenswrapper[4745]: I1208 00:19:45.950758 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"interconnect-operator-dockercfg-nnnjm" Dec 08 00:19:45 crc kubenswrapper[4745]: I1208 00:19:45.965877 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-b22m4"] Dec 08 00:19:46 crc kubenswrapper[4745]: I1208 00:19:46.125878 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xrsh\" (UniqueName: \"kubernetes.io/projected/d84baf13-44a0-43a7-8c1e-a8e3165dd02d-kube-api-access-8xrsh\") pod \"interconnect-operator-5bb49f789d-b22m4\" (UID: \"d84baf13-44a0-43a7-8c1e-a8e3165dd02d\") " pod="service-telemetry/interconnect-operator-5bb49f789d-b22m4" Dec 08 00:19:46 crc kubenswrapper[4745]: I1208 00:19:46.238891 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xrsh\" (UniqueName: \"kubernetes.io/projected/d84baf13-44a0-43a7-8c1e-a8e3165dd02d-kube-api-access-8xrsh\") pod \"interconnect-operator-5bb49f789d-b22m4\" (UID: \"d84baf13-44a0-43a7-8c1e-a8e3165dd02d\") " pod="service-telemetry/interconnect-operator-5bb49f789d-b22m4" Dec 08 00:19:46 crc kubenswrapper[4745]: I1208 00:19:46.288208 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xrsh\" (UniqueName: \"kubernetes.io/projected/d84baf13-44a0-43a7-8c1e-a8e3165dd02d-kube-api-access-8xrsh\") pod \"interconnect-operator-5bb49f789d-b22m4\" (UID: \"d84baf13-44a0-43a7-8c1e-a8e3165dd02d\") " pod="service-telemetry/interconnect-operator-5bb49f789d-b22m4" Dec 08 00:19:46 crc kubenswrapper[4745]: I1208 00:19:46.563537 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-b22m4" Dec 08 00:19:52 crc kubenswrapper[4745]: I1208 00:19:52.460983 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:19:52 crc kubenswrapper[4745]: I1208 00:19:52.461386 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:19:56 crc kubenswrapper[4745]: E1208 00:19:56.631521 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb" Dec 08 00:19:56 crc kubenswrapper[4745]: E1208 00:19:56.632199 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb,Command:[],Args:[--namespace=$(NAMESPACE) --images=perses=$(RELATED_IMAGE_PERSES) --images=alertmanager=$(RELATED_IMAGE_ALERTMANAGER) --images=prometheus=$(RELATED_IMAGE_PROMETHEUS) --images=thanos=$(RELATED_IMAGE_THANOS) --images=ui-dashboards=$(RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN) --images=ui-distributed-tracing=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN) --images=ui-distributed-tracing-pf5=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5) --images=ui-distributed-tracing-pf4=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4) --images=ui-logging=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN) --images=ui-logging-pf4=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4) --images=ui-troubleshooting-panel=$(RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN) --images=ui-monitoring=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN) --images=ui-monitoring-pf5=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5) --images=korrel8r=$(RELATED_IMAGE_KORREL8R) --images=health-analyzer=$(RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER) 
--openshift.enabled=true],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER,Value:registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:e718854a7d6ca8accf0fa72db0eb902e46c44d747ad51dc3f06bba0cefaa3c01,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS,Value:registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:17ea20be390a94ab39f5cdd7f0cbc2498046eebcf77fe3dec9aa288d5c2cf46b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_THANOS,Value:registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PERSES,Value:registry.redhat.io/cluster-observability-operator/perses-rhel9@sha256:91531137fc1dcd740e277e0f65e120a0176a16f788c14c27925b61aa0b792ade,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-rhel9@sha256:a69da8bbca8a28dd2925f864d51cc31cf761b10532c553095ba40b242ef701cb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-rhel9@sha256:897e1bfad1187062725b54d87107bd0155972257a50d8335dd29e1999b828a4f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf5-rhel9@sha256:95fe5b5746ca8c07ac9217ce2d8ac8e6afad17af210f9d8e0074df1310b209a8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf4-rhel9@sha256:e9d9a89e4d8126a62b1852055482258ee528cac6398dd5d43ebad75ace0f33c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-rhel9@sha256:ec684a0645ceb917b019af7ddba68c3533416e356ab0d0320a30e75ca7ebb31b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-pf4-rhel9@sha256:3b9693fcde9b3a9494fb04735b1f7cfd0426f10be820fdc3f024175c0d3df1c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-rhel9@sha256:580606f194180accc8abba099e17a26dca7522ec6d233fa2fdd40312771703e3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-rhel9@sha256:e03777be39e71701935059cd877603874a13ac94daa73219d4e5e545599d78a9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-pf5-rhel9@sha256:aa47256193cfd2877853878e1ae97d2ab8b8e5deae62b387cbfad02b284d379c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KORREL8R,Value:registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:c595ff56b2cb85514bf4784db6ddb82e4e657e3e708a7fb695fc4997379a94d4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER,Value:registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:45a4ec2a519bcec99e886aa91
596d5356a2414a2bd103baaef9fa7838c672eb2,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{400 -3} {} 400m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:observability-operator-tls,ReadOnly:true,MountPath:/etc/tls/private,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kh47v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod observability-operator-d8bb48f5d-bl7n7_openshift-operators(ee524a4b-6873-4801-af1a-955b4c28dd27): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 00:19:56 crc kubenswrapper[4745]: E1208 00:19:56.633438 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/observability-operator-d8bb48f5d-bl7n7" podUID="ee524a4b-6873-4801-af1a-955b4c28dd27" Dec 08 00:19:57 crc kubenswrapper[4745]: E1208 00:19:57.167956 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec" Dec 08 00:19:57 crc kubenswrapper[4745]: E1208 00:19:57.168165 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt 
--web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6_openshift-operators(dc180ed4-bc7e-4a33-ba55-def51f0edd4e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 00:19:57 crc kubenswrapper[4745]: E1208 00:19:57.169579 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6" podUID="dc180ed4-bc7e-4a33-ba55-def51f0edd4e" Dec 08 00:19:57 crc kubenswrapper[4745]: E1208 00:19:57.204794 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb\\\"\"" pod="openshift-operators/observability-operator-d8bb48f5d-bl7n7" podUID="ee524a4b-6873-4801-af1a-955b4c28dd27" Dec 08 00:19:57 crc kubenswrapper[4745]: E1208 00:19:57.205545 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6" podUID="dc180ed4-bc7e-4a33-ba55-def51f0edd4e" Dec 08 00:19:58 crc kubenswrapper[4745]: E1208 00:19:58.081877 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context 
canceled" image="registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385" Dec 08 00:19:58 crc kubenswrapper[4745]: E1208 00:19:58.082060 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:perses-operator,Image:registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openshift-service-ca,ReadOnly:true,MountPath:/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4hfxn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod perses-operator-5446b9c989-drjd7_openshift-operators(367235e5-1c6a-42e1-b7fa-39dd81931cb9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 00:19:58 crc kubenswrapper[4745]: E1208 00:19:58.083245 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/perses-operator-5446b9c989-drjd7" podUID="367235e5-1c6a-42e1-b7fa-39dd81931cb9" Dec 08 00:19:58 crc kubenswrapper[4745]: E1208 00:19:58.101375 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec" Dec 08 00:19:58 crc 
kubenswrapper[4745]: E1208 00:19:58.101750 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2_openshift-operators(66dacf9f-e094-4d4a-ab15-c8d2da21d334): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 00:19:58 crc kubenswrapper[4745]: E1208 00:19:58.103134 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2" podUID="66dacf9f-e094-4d4a-ab15-c8d2da21d334" Dec 08 00:19:58 crc kubenswrapper[4745]: E1208 00:19:58.207741 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2" podUID="66dacf9f-e094-4d4a-ab15-c8d2da21d334" Dec 08 00:19:58 crc kubenswrapper[4745]: E1208 00:19:58.207990 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385\\\"\"" pod="openshift-operators/perses-operator-5446b9c989-drjd7" podUID="367235e5-1c6a-42e1-b7fa-39dd81931cb9" Dec 08 00:19:58 crc kubenswrapper[4745]: E1208 00:19:58.719328 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3" Dec 08 00:19:58 crc kubenswrapper[4745]: E1208 00:19:58.719485 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3,Command:[],Args:[--prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOGC,Value:30,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER,Value:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{157286400 0} {} 150Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zrtdc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-668cf9dfbb-lrvkr_openshift-operators(a2fa94d7-daa5-4465-a657-7d48ec101d98): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 08 00:19:58 crc kubenswrapper[4745]: E1208 00:19:58.720671 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" 
pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-lrvkr" podUID="a2fa94d7-daa5-4465-a657-7d48ec101d98" Dec 08 00:19:58 crc kubenswrapper[4745]: I1208 00:19:58.953405 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-766bf8575-sbkqw"] Dec 08 00:19:58 crc kubenswrapper[4745]: W1208 00:19:58.961634 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd387d453_54c8_4288_a66b_e6e6ea3ab09c.slice/crio-055e9c6fd0dd5b4292a1cf90a3455638ee48b39e1e4a4a14fa77f19f8f937baf WatchSource:0}: Error finding container 055e9c6fd0dd5b4292a1cf90a3455638ee48b39e1e4a4a14fa77f19f8f937baf: Status 404 returned error can't find the container with id 055e9c6fd0dd5b4292a1cf90a3455638ee48b39e1e4a4a14fa77f19f8f937baf Dec 08 00:19:59 crc kubenswrapper[4745]: I1208 00:19:59.007829 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-b22m4"] Dec 08 00:19:59 crc kubenswrapper[4745]: I1208 00:19:59.217417 4745 generic.go:334] "Generic (PLEG): container finished" podID="02dc9d5d-28eb-4c8e-af70-445fc2a8214e" containerID="1b8fa5a1ba8d0d52d4a1e959c10d9df9bfcb19f5a747156354c38d1cd185bdbd" exitCode=0 Dec 08 00:19:59 crc kubenswrapper[4745]: I1208 00:19:59.217521 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" event={"ID":"02dc9d5d-28eb-4c8e-af70-445fc2a8214e","Type":"ContainerDied","Data":"1b8fa5a1ba8d0d52d4a1e959c10d9df9bfcb19f5a747156354c38d1cd185bdbd"} Dec 08 00:19:59 crc kubenswrapper[4745]: I1208 00:19:59.219596 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-b22m4" event={"ID":"d84baf13-44a0-43a7-8c1e-a8e3165dd02d","Type":"ContainerStarted","Data":"1e318df2b7f3021c192418d5737cd3c08d4d494c94ece6141dcfafd5df292d6e"} Dec 08 00:19:59 crc kubenswrapper[4745]: I1208 00:19:59.227303 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-766bf8575-sbkqw" event={"ID":"d387d453-54c8-4288-a66b-e6e6ea3ab09c","Type":"ContainerStarted","Data":"055e9c6fd0dd5b4292a1cf90a3455638ee48b39e1e4a4a14fa77f19f8f937baf"} Dec 08 00:19:59 crc kubenswrapper[4745]: E1208 00:19:59.247443 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3\\\"\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-lrvkr" podUID="a2fa94d7-daa5-4465-a657-7d48ec101d98" Dec 08 00:20:00 crc kubenswrapper[4745]: I1208 00:20:00.280451 4745 generic.go:334] "Generic (PLEG): container finished" podID="02dc9d5d-28eb-4c8e-af70-445fc2a8214e" containerID="1dc4e5b81372adbf34339402bce252741c8a698f2c43b5e83316835eb5198e97" exitCode=0 Dec 08 00:20:00 crc kubenswrapper[4745]: I1208 00:20:00.281076 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" event={"ID":"02dc9d5d-28eb-4c8e-af70-445fc2a8214e","Type":"ContainerDied","Data":"1dc4e5b81372adbf34339402bce252741c8a698f2c43b5e83316835eb5198e97"} Dec 08 00:20:02 crc kubenswrapper[4745]: I1208 00:20:02.210385 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" Dec 08 00:20:02 crc kubenswrapper[4745]: I1208 00:20:02.298359 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" event={"ID":"02dc9d5d-28eb-4c8e-af70-445fc2a8214e","Type":"ContainerDied","Data":"c464382c4b9549cd9209b0393508769fc572fa3d7e815f429e2d2b92b6f5d6d2"} Dec 08 00:20:02 crc kubenswrapper[4745]: I1208 00:20:02.298408 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c464382c4b9549cd9209b0393508769fc572fa3d7e815f429e2d2b92b6f5d6d2" Dec 08 00:20:02 crc kubenswrapper[4745]: I1208 00:20:02.298430 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6" Dec 08 00:20:02 crc kubenswrapper[4745]: I1208 00:20:02.311604 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-util\") pod \"02dc9d5d-28eb-4c8e-af70-445fc2a8214e\" (UID: \"02dc9d5d-28eb-4c8e-af70-445fc2a8214e\") " Dec 08 00:20:02 crc kubenswrapper[4745]: I1208 00:20:02.311669 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9gk8\" (UniqueName: \"kubernetes.io/projected/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-kube-api-access-d9gk8\") pod \"02dc9d5d-28eb-4c8e-af70-445fc2a8214e\" (UID: \"02dc9d5d-28eb-4c8e-af70-445fc2a8214e\") " Dec 08 00:20:02 crc kubenswrapper[4745]: I1208 00:20:02.311721 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-bundle\") pod \"02dc9d5d-28eb-4c8e-af70-445fc2a8214e\" (UID: \"02dc9d5d-28eb-4c8e-af70-445fc2a8214e\") " Dec 08 00:20:02 crc kubenswrapper[4745]: I1208 00:20:02.314003 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-bundle" (OuterVolumeSpecName: "bundle") pod "02dc9d5d-28eb-4c8e-af70-445fc2a8214e" (UID: "02dc9d5d-28eb-4c8e-af70-445fc2a8214e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:20:02 crc kubenswrapper[4745]: I1208 00:20:02.328126 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-kube-api-access-d9gk8" (OuterVolumeSpecName: "kube-api-access-d9gk8") pod "02dc9d5d-28eb-4c8e-af70-445fc2a8214e" (UID: "02dc9d5d-28eb-4c8e-af70-445fc2a8214e"). InnerVolumeSpecName "kube-api-access-d9gk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:20:02 crc kubenswrapper[4745]: I1208 00:20:02.332588 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-util" (OuterVolumeSpecName: "util") pod "02dc9d5d-28eb-4c8e-af70-445fc2a8214e" (UID: "02dc9d5d-28eb-4c8e-af70-445fc2a8214e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:20:02 crc kubenswrapper[4745]: I1208 00:20:02.412849 4745 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-util\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:02 crc kubenswrapper[4745]: I1208 00:20:02.414190 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9gk8\" (UniqueName: \"kubernetes.io/projected/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-kube-api-access-d9gk8\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:02 crc kubenswrapper[4745]: I1208 00:20:02.414228 4745 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/02dc9d5d-28eb-4c8e-af70-445fc2a8214e-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.071529 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cqh4k"] Dec 08 00:20:03 crc kubenswrapper[4745]: E1208 00:20:03.071771 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02dc9d5d-28eb-4c8e-af70-445fc2a8214e" containerName="extract" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.071785 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="02dc9d5d-28eb-4c8e-af70-445fc2a8214e" containerName="extract" Dec 08 00:20:03 crc kubenswrapper[4745]: E1208 00:20:03.071804 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02dc9d5d-28eb-4c8e-af70-445fc2a8214e" containerName="util" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.071811 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="02dc9d5d-28eb-4c8e-af70-445fc2a8214e" containerName="util" Dec 08 00:20:03 crc kubenswrapper[4745]: E1208 00:20:03.071826 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02dc9d5d-28eb-4c8e-af70-445fc2a8214e" containerName="pull" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.071834 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="02dc9d5d-28eb-4c8e-af70-445fc2a8214e" containerName="pull" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.071982 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="02dc9d5d-28eb-4c8e-af70-445fc2a8214e" containerName="extract" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.072899 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.080351 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cqh4k"] Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.123362 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4q24\" (UniqueName: \"kubernetes.io/projected/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-kube-api-access-v4q24\") pod \"redhat-operators-cqh4k\" (UID: \"5a92aad8-3ca0-4f04-8a46-1feb10039d1a\") " pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.123470 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-utilities\") pod \"redhat-operators-cqh4k\" (UID: \"5a92aad8-3ca0-4f04-8a46-1feb10039d1a\") " pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.123540 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-catalog-content\") pod \"redhat-operators-cqh4k\" (UID: \"5a92aad8-3ca0-4f04-8a46-1feb10039d1a\") " pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.225581 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4q24\" (UniqueName: \"kubernetes.io/projected/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-kube-api-access-v4q24\") pod \"redhat-operators-cqh4k\" (UID: \"5a92aad8-3ca0-4f04-8a46-1feb10039d1a\") " pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.225668 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-utilities\") pod \"redhat-operators-cqh4k\" (UID: \"5a92aad8-3ca0-4f04-8a46-1feb10039d1a\") " pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.225719 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-catalog-content\") pod \"redhat-operators-cqh4k\" (UID: \"5a92aad8-3ca0-4f04-8a46-1feb10039d1a\") " pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.226364 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-catalog-content\") pod \"redhat-operators-cqh4k\" (UID: \"5a92aad8-3ca0-4f04-8a46-1feb10039d1a\") " pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.226679 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-utilities\") pod \"redhat-operators-cqh4k\" (UID: \"5a92aad8-3ca0-4f04-8a46-1feb10039d1a\") " pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.242186 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-v4q24\" (UniqueName: \"kubernetes.io/projected/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-kube-api-access-v4q24\") pod \"redhat-operators-cqh4k\" (UID: \"5a92aad8-3ca0-4f04-8a46-1feb10039d1a\") " pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.305856 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-766bf8575-sbkqw" event={"ID":"d387d453-54c8-4288-a66b-e6e6ea3ab09c","Type":"ContainerStarted","Data":"eeada98a62d0f21e75dd2586d77945408d4e93d3d171aa72d74586ea9cf4569e"} Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.323052 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elastic-operator-766bf8575-sbkqw" podStartSLOduration=18.063345779 podStartE2EDuration="21.323034709s" podCreationTimestamp="2025-12-08 00:19:42 +0000 UTC" firstStartedPulling="2025-12-08 00:19:58.96351868 +0000 UTC m=+754.392724980" lastFinishedPulling="2025-12-08 00:20:02.22320761 +0000 UTC m=+757.652413910" observedRunningTime="2025-12-08 00:20:03.321095056 +0000 UTC m=+758.750301356" watchObservedRunningTime="2025-12-08 00:20:03.323034709 +0000 UTC m=+758.752241009" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.389694 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.397227 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.399492 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.402283 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-remote-ca" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.402591 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-xpack-file-realm" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.408275 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-config" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.408503 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-unicast-hosts" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.408611 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-internal-users" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.409065 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-http-certs-internal" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.409212 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-scripts" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.409210 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-transport-certs" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.409869 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-dockercfg-c54t8" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.420181 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["service-telemetry/elasticsearch-es-default-0"] Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.427607 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.427661 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.427686 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.427702 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.427717 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.427738 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.427754 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.427772 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " 
pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.427790 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.427806 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.427820 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.427835 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.427851 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.427878 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.427894 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.528966 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: 
\"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.529299 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.529329 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.529348 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.529363 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.529381 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.529399 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.529419 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.529444 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.529462 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.529480 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.529498 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.529520 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.529568 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.529586 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.530436 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.531156 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.531734 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-tmp-volume\") pod 
\"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.532160 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.532311 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.532626 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.533561 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.533591 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.534279 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.534506 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.534588 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: 
I1208 00:20:03.535646 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.536253 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.544429 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.554529 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/c18b8080-8e69-4234-9e9d-7ec8cacb62a3-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"c18b8080-8e69-4234-9e9d-7ec8cacb62a3\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.761547 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:03 crc kubenswrapper[4745]: I1208 00:20:03.838277 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cqh4k"] Dec 08 00:20:06 crc kubenswrapper[4745]: I1208 00:20:06.741392 4745 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 08 00:20:08 crc kubenswrapper[4745]: I1208 00:20:08.057666 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Dec 08 00:20:08 crc kubenswrapper[4745]: I1208 00:20:08.330934 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"c18b8080-8e69-4234-9e9d-7ec8cacb62a3","Type":"ContainerStarted","Data":"138e7b2700890ffa000c4b2c4ce694aa4a5864e5e5d31ea19dc237657e8f9277"} Dec 08 00:20:08 crc kubenswrapper[4745]: I1208 00:20:08.331904 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-b22m4" event={"ID":"d84baf13-44a0-43a7-8c1e-a8e3165dd02d","Type":"ContainerStarted","Data":"3a353190a8af17f6abfb1b7409cefe9bd624c7150bf0b55f9057c7e9dba6d8aa"} Dec 08 00:20:08 crc kubenswrapper[4745]: I1208 00:20:08.333867 4745 generic.go:334] "Generic (PLEG): container finished" podID="5a92aad8-3ca0-4f04-8a46-1feb10039d1a" containerID="a170dec209793afacbf8fd8d4e7cfbfa0a4ae9d5df8fc2291c6006c168efeabd" exitCode=0 Dec 08 00:20:08 crc kubenswrapper[4745]: I1208 00:20:08.333905 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cqh4k" 
event={"ID":"5a92aad8-3ca0-4f04-8a46-1feb10039d1a","Type":"ContainerDied","Data":"a170dec209793afacbf8fd8d4e7cfbfa0a4ae9d5df8fc2291c6006c168efeabd"} Dec 08 00:20:08 crc kubenswrapper[4745]: I1208 00:20:08.333941 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cqh4k" event={"ID":"5a92aad8-3ca0-4f04-8a46-1feb10039d1a","Type":"ContainerStarted","Data":"d807a4a27efb79845d7d0553ebd55d1845ed5ea2312e84862ef835597abde347"} Dec 08 00:20:08 crc kubenswrapper[4745]: I1208 00:20:08.346864 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/interconnect-operator-5bb49f789d-b22m4" podStartSLOduration=14.413532786 podStartE2EDuration="23.346841494s" podCreationTimestamp="2025-12-08 00:19:45 +0000 UTC" firstStartedPulling="2025-12-08 00:19:59.028679756 +0000 UTC m=+754.457886056" lastFinishedPulling="2025-12-08 00:20:07.961988464 +0000 UTC m=+763.391194764" observedRunningTime="2025-12-08 00:20:08.345852857 +0000 UTC m=+763.775059177" watchObservedRunningTime="2025-12-08 00:20:08.346841494 +0000 UTC m=+763.776047794" Dec 08 00:20:10 crc kubenswrapper[4745]: I1208 00:20:10.347136 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2" event={"ID":"66dacf9f-e094-4d4a-ab15-c8d2da21d334","Type":"ContainerStarted","Data":"5f4d2ab04e09e197c684e78455756c6c398662aeb440b397ca8f87c11950eedc"} Dec 08 00:20:10 crc kubenswrapper[4745]: I1208 00:20:10.350644 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6" event={"ID":"dc180ed4-bc7e-4a33-ba55-def51f0edd4e","Type":"ContainerStarted","Data":"99368dec85c1624ee802c3f95c28bd9fddb3e834bf7d31d77dd221222fc48b4f"} Dec 08 00:20:10 crc kubenswrapper[4745]: I1208 00:20:10.352778 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cqh4k" event={"ID":"5a92aad8-3ca0-4f04-8a46-1feb10039d1a","Type":"ContainerStarted","Data":"f0325e8fd83e18c96da3f99e4724b38b465314071ab6257a0a0f1ac6e49524cc"} Dec 08 00:20:10 crc kubenswrapper[4745]: I1208 00:20:10.371724 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2" podStartSLOduration=2.360529321 podStartE2EDuration="34.371703847s" podCreationTimestamp="2025-12-08 00:19:36 +0000 UTC" firstStartedPulling="2025-12-08 00:19:37.865086576 +0000 UTC m=+733.294292876" lastFinishedPulling="2025-12-08 00:20:09.876261102 +0000 UTC m=+765.305467402" observedRunningTime="2025-12-08 00:20:10.370291088 +0000 UTC m=+765.799497438" watchObservedRunningTime="2025-12-08 00:20:10.371703847 +0000 UTC m=+765.800910157" Dec 08 00:20:10 crc kubenswrapper[4745]: I1208 00:20:10.451437 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6" podStartSLOduration=3.005625064 podStartE2EDuration="34.45141712s" podCreationTimestamp="2025-12-08 00:19:36 +0000 UTC" firstStartedPulling="2025-12-08 00:19:37.829223748 +0000 UTC m=+733.258430048" lastFinishedPulling="2025-12-08 00:20:09.275015804 +0000 UTC m=+764.704222104" observedRunningTime="2025-12-08 00:20:10.450233627 +0000 UTC m=+765.879439927" watchObservedRunningTime="2025-12-08 00:20:10.45141712 +0000 UTC m=+765.880623430" Dec 08 00:20:11 crc kubenswrapper[4745]: I1208 00:20:11.360460 4745 generic.go:334] 
"Generic (PLEG): container finished" podID="5a92aad8-3ca0-4f04-8a46-1feb10039d1a" containerID="f0325e8fd83e18c96da3f99e4724b38b465314071ab6257a0a0f1ac6e49524cc" exitCode=0 Dec 08 00:20:11 crc kubenswrapper[4745]: I1208 00:20:11.360509 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cqh4k" event={"ID":"5a92aad8-3ca0-4f04-8a46-1feb10039d1a","Type":"ContainerDied","Data":"f0325e8fd83e18c96da3f99e4724b38b465314071ab6257a0a0f1ac6e49524cc"} Dec 08 00:20:12 crc kubenswrapper[4745]: I1208 00:20:12.375726 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cqh4k" event={"ID":"5a92aad8-3ca0-4f04-8a46-1feb10039d1a","Type":"ContainerStarted","Data":"f186ccaa9b9643de5832fa89e953302bcc74956c94ee14a9a1be5b2819cedc61"} Dec 08 00:20:12 crc kubenswrapper[4745]: I1208 00:20:12.422666 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cqh4k" podStartSLOduration=5.658221102 podStartE2EDuration="9.42264822s" podCreationTimestamp="2025-12-08 00:20:03 +0000 UTC" firstStartedPulling="2025-12-08 00:20:08.335218047 +0000 UTC m=+763.764424347" lastFinishedPulling="2025-12-08 00:20:12.099645165 +0000 UTC m=+767.528851465" observedRunningTime="2025-12-08 00:20:12.403776835 +0000 UTC m=+767.832983145" watchObservedRunningTime="2025-12-08 00:20:12.42264822 +0000 UTC m=+767.851854520" Dec 08 00:20:13 crc kubenswrapper[4745]: I1208 00:20:13.387040 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-drjd7" event={"ID":"367235e5-1c6a-42e1-b7fa-39dd81931cb9","Type":"ContainerStarted","Data":"eb7762b015d04f55022bfcae925743dc0f5c8b0bcfa5a2d2fc6b6dbd40284fc6"} Dec 08 00:20:13 crc kubenswrapper[4745]: I1208 00:20:13.387253 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-drjd7" Dec 08 00:20:13 crc kubenswrapper[4745]: I1208 00:20:13.390307 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-lrvkr" event={"ID":"a2fa94d7-daa5-4465-a657-7d48ec101d98","Type":"ContainerStarted","Data":"875494aa5ba533a4769fe9043fc8f0d0e530cff15a00d2c7e4407149ec7333a8"} Dec 08 00:20:13 crc kubenswrapper[4745]: I1208 00:20:13.390386 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:13 crc kubenswrapper[4745]: I1208 00:20:13.390516 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:13 crc kubenswrapper[4745]: I1208 00:20:13.393603 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-bl7n7" event={"ID":"ee524a4b-6873-4801-af1a-955b4c28dd27","Type":"ContainerStarted","Data":"8d657d29c09dfe3e924a50070496a121d4a827608f374d00efbf852a31dedd10"} Dec 08 00:20:13 crc kubenswrapper[4745]: I1208 00:20:13.394274 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-bl7n7" Dec 08 00:20:13 crc kubenswrapper[4745]: I1208 00:20:13.406081 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-bl7n7" Dec 08 00:20:13 crc kubenswrapper[4745]: I1208 00:20:13.409433 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-operators/perses-operator-5446b9c989-drjd7" podStartSLOduration=2.17130556 podStartE2EDuration="36.409416087s" podCreationTimestamp="2025-12-08 00:19:37 +0000 UTC" firstStartedPulling="2025-12-08 00:19:38.185699174 +0000 UTC m=+733.614905494" lastFinishedPulling="2025-12-08 00:20:12.423809721 +0000 UTC m=+767.853016021" observedRunningTime="2025-12-08 00:20:13.406831436 +0000 UTC m=+768.836037736" watchObservedRunningTime="2025-12-08 00:20:13.409416087 +0000 UTC m=+768.838622387" Dec 08 00:20:13 crc kubenswrapper[4745]: I1208 00:20:13.451164 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-bl7n7" podStartSLOduration=2.994970933 podStartE2EDuration="37.451143204s" podCreationTimestamp="2025-12-08 00:19:36 +0000 UTC" firstStartedPulling="2025-12-08 00:19:38.150543496 +0000 UTC m=+733.579749796" lastFinishedPulling="2025-12-08 00:20:12.606715767 +0000 UTC m=+768.035922067" observedRunningTime="2025-12-08 00:20:13.427122359 +0000 UTC m=+768.856328659" watchObservedRunningTime="2025-12-08 00:20:13.451143204 +0000 UTC m=+768.880349514" Dec 08 00:20:13 crc kubenswrapper[4745]: I1208 00:20:13.453573 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-lrvkr" podStartSLOduration=2.327544241 podStartE2EDuration="37.45356414s" podCreationTimestamp="2025-12-08 00:19:36 +0000 UTC" firstStartedPulling="2025-12-08 00:19:37.768605136 +0000 UTC m=+733.197811436" lastFinishedPulling="2025-12-08 00:20:12.894625035 +0000 UTC m=+768.323831335" observedRunningTime="2025-12-08 00:20:13.447622558 +0000 UTC m=+768.876828858" watchObservedRunningTime="2025-12-08 00:20:13.45356414 +0000 UTC m=+768.882770440" Dec 08 00:20:14 crc kubenswrapper[4745]: I1208 00:20:14.441487 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cqh4k" podUID="5a92aad8-3ca0-4f04-8a46-1feb10039d1a" containerName="registry-server" probeResult="failure" output=< Dec 08 00:20:14 crc kubenswrapper[4745]: timeout: failed to connect service ":50051" within 1s Dec 08 00:20:14 crc kubenswrapper[4745]: > Dec 08 00:20:17 crc kubenswrapper[4745]: I1208 00:20:17.488354 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-z6kfl"] Dec 08 00:20:17 crc kubenswrapper[4745]: I1208 00:20:17.489371 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-z6kfl" Dec 08 00:20:17 crc kubenswrapper[4745]: I1208 00:20:17.491951 4745 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-mjjsc" Dec 08 00:20:17 crc kubenswrapper[4745]: I1208 00:20:17.491981 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Dec 08 00:20:17 crc kubenswrapper[4745]: I1208 00:20:17.492075 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Dec 08 00:20:17 crc kubenswrapper[4745]: I1208 00:20:17.501312 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-z6kfl"] Dec 08 00:20:17 crc kubenswrapper[4745]: I1208 00:20:17.551025 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-drjd7" Dec 08 00:20:17 crc kubenswrapper[4745]: I1208 00:20:17.644531 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjxnl\" (UniqueName: \"kubernetes.io/projected/917ff1a2-c53d-459e-ab3f-7a5c67a7ba88-kube-api-access-gjxnl\") pod \"cert-manager-operator-controller-manager-5446d6888b-z6kfl\" (UID: \"917ff1a2-c53d-459e-ab3f-7a5c67a7ba88\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-z6kfl" Dec 08 00:20:17 crc kubenswrapper[4745]: I1208 00:20:17.644609 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/917ff1a2-c53d-459e-ab3f-7a5c67a7ba88-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-z6kfl\" (UID: \"917ff1a2-c53d-459e-ab3f-7a5c67a7ba88\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-z6kfl" Dec 08 00:20:17 crc kubenswrapper[4745]: I1208 00:20:17.745297 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjxnl\" (UniqueName: \"kubernetes.io/projected/917ff1a2-c53d-459e-ab3f-7a5c67a7ba88-kube-api-access-gjxnl\") pod \"cert-manager-operator-controller-manager-5446d6888b-z6kfl\" (UID: \"917ff1a2-c53d-459e-ab3f-7a5c67a7ba88\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-z6kfl" Dec 08 00:20:17 crc kubenswrapper[4745]: I1208 00:20:17.745379 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/917ff1a2-c53d-459e-ab3f-7a5c67a7ba88-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-z6kfl\" (UID: \"917ff1a2-c53d-459e-ab3f-7a5c67a7ba88\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-z6kfl" Dec 08 00:20:17 crc kubenswrapper[4745]: I1208 00:20:17.746002 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/917ff1a2-c53d-459e-ab3f-7a5c67a7ba88-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-z6kfl\" (UID: \"917ff1a2-c53d-459e-ab3f-7a5c67a7ba88\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-z6kfl" Dec 08 00:20:17 crc kubenswrapper[4745]: I1208 00:20:17.792005 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjxnl\" (UniqueName: 
\"kubernetes.io/projected/917ff1a2-c53d-459e-ab3f-7a5c67a7ba88-kube-api-access-gjxnl\") pod \"cert-manager-operator-controller-manager-5446d6888b-z6kfl\" (UID: \"917ff1a2-c53d-459e-ab3f-7a5c67a7ba88\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-z6kfl" Dec 08 00:20:17 crc kubenswrapper[4745]: I1208 00:20:17.807326 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-z6kfl" Dec 08 00:20:18 crc kubenswrapper[4745]: I1208 00:20:18.162810 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-z6kfl"] Dec 08 00:20:18 crc kubenswrapper[4745]: W1208 00:20:18.166094 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod917ff1a2_c53d_459e_ab3f_7a5c67a7ba88.slice/crio-8b813703dd913b490683278ac6ff2fd0f9f0c8740512556f6b8823203f68f6a3 WatchSource:0}: Error finding container 8b813703dd913b490683278ac6ff2fd0f9f0c8740512556f6b8823203f68f6a3: Status 404 returned error can't find the container with id 8b813703dd913b490683278ac6ff2fd0f9f0c8740512556f6b8823203f68f6a3 Dec 08 00:20:18 crc kubenswrapper[4745]: I1208 00:20:18.439246 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-z6kfl" event={"ID":"917ff1a2-c53d-459e-ab3f-7a5c67a7ba88","Type":"ContainerStarted","Data":"8b813703dd913b490683278ac6ff2fd0f9f0c8740512556f6b8823203f68f6a3"} Dec 08 00:20:22 crc kubenswrapper[4745]: I1208 00:20:22.460918 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:20:22 crc kubenswrapper[4745]: I1208 00:20:22.461321 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:20:22 crc kubenswrapper[4745]: I1208 00:20:22.461377 4745 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:20:22 crc kubenswrapper[4745]: I1208 00:20:22.462090 4745 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d68e50e68d0ae8a6e03f26cfdb8cf98d132e8e3ab3e913de8377758729efd13e"} pod="openshift-machine-config-operator/machine-config-daemon-6czdv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 00:20:22 crc kubenswrapper[4745]: I1208 00:20:22.462147 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" containerID="cri-o://d68e50e68d0ae8a6e03f26cfdb8cf98d132e8e3ab3e913de8377758729efd13e" gracePeriod=600 Dec 08 00:20:23 crc kubenswrapper[4745]: I1208 00:20:23.438078 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:23 crc kubenswrapper[4745]: I1208 00:20:23.495559 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:25 crc kubenswrapper[4745]: I1208 00:20:25.485535 4745 generic.go:334] "Generic (PLEG): container finished" podID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerID="d68e50e68d0ae8a6e03f26cfdb8cf98d132e8e3ab3e913de8377758729efd13e" exitCode=0 Dec 08 00:20:25 crc kubenswrapper[4745]: I1208 00:20:25.485584 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerDied","Data":"d68e50e68d0ae8a6e03f26cfdb8cf98d132e8e3ab3e913de8377758729efd13e"} Dec 08 00:20:25 crc kubenswrapper[4745]: I1208 00:20:25.485628 4745 scope.go:117] "RemoveContainer" containerID="d8c0de7b78040ece60c672aa32dba210e3b388298ab73c3146737e606588c30f" Dec 08 00:20:26 crc kubenswrapper[4745]: I1208 00:20:26.858336 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cqh4k"] Dec 08 00:20:26 crc kubenswrapper[4745]: I1208 00:20:26.858984 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cqh4k" podUID="5a92aad8-3ca0-4f04-8a46-1feb10039d1a" containerName="registry-server" containerID="cri-o://f186ccaa9b9643de5832fa89e953302bcc74956c94ee14a9a1be5b2819cedc61" gracePeriod=2 Dec 08 00:20:28 crc kubenswrapper[4745]: I1208 00:20:28.505824 4745 generic.go:334] "Generic (PLEG): container finished" podID="5a92aad8-3ca0-4f04-8a46-1feb10039d1a" containerID="f186ccaa9b9643de5832fa89e953302bcc74956c94ee14a9a1be5b2819cedc61" exitCode=0 Dec 08 00:20:28 crc kubenswrapper[4745]: I1208 00:20:28.505908 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cqh4k" event={"ID":"5a92aad8-3ca0-4f04-8a46-1feb10039d1a","Type":"ContainerDied","Data":"f186ccaa9b9643de5832fa89e953302bcc74956c94ee14a9a1be5b2819cedc61"} Dec 08 00:20:31 crc kubenswrapper[4745]: E1208 00:20:31.143413 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \"/var/tmp/container_images_storage1403159883/1\": happened during read: context canceled" image="registry.redhat.io/cert-manager/cert-manager-operator-rhel9@sha256:fa8de363ab4435c1085ac37f1bad488828c6ae8ba361c5f865c27ef577610911" Dec 08 00:20:31 crc kubenswrapper[4745]: E1208 00:20:31.143664 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cert-manager-operator,Image:registry.redhat.io/cert-manager/cert-manager-operator-rhel9@sha256:fa8de363ab4435c1085ac37f1bad488828c6ae8ba361c5f865c27ef577610911,Command:[/usr/bin/cert-manager-operator],Args:[start --v=$(OPERATOR_LOG_LEVEL) --trusted-ca-configmap=$(TRUSTED_CA_CONFIGMAP_NAME) --cloud-credentials-secret=$(CLOUD_CREDENTIALS_SECRET_NAME) 
--unsupported-addon-features=$(UNSUPPORTED_ADDON_FEATURES)],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:WATCH_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.targetNamespaces'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_NAME,Value:cert-manager-operator,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_WEBHOOK,Value:registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_CA_INJECTOR,Value:registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_CONTROLLER,Value:registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_ACMESOLVER,Value:registry.redhat.io/cert-manager/jetstack-cert-manager-acmesolver-rhel9@sha256:ba937fc4b9eee31422914352c11a45b90754ba4fbe490ea45249b90afdc4e0a7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_ISTIOCSR,Value:registry.redhat.io/cert-manager/cert-manager-istio-csr-rhel9@sha256:af1ac813b8ee414ef215936f05197bc498bccbd540f3e2a93cb522221ba112bc,ValueFrom:nil,},EnvVar{Name:OPERAND_IMAGE_VERSION,Value:1.18.3,ValueFrom:nil,},EnvVar{Name:ISTIOCSR_OPERAND_IMAGE_VERSION,Value:0.14.2,ValueFrom:nil,},EnvVar{Name:OPERATOR_IMAGE_VERSION,Value:1.18.0,ValueFrom:nil,},EnvVar{Name:OPERATOR_LOG_LEVEL,Value:2,ValueFrom:nil,},EnvVar{Name:TRUSTED_CA_CONFIGMAP_NAME,Value:,ValueFrom:nil,},EnvVar{Name:CLOUD_CREDENTIALS_SECRET_NAME,Value:,ValueFrom:nil,},EnvVar{Name:UNSUPPORTED_ADDON_FEATURES,Value:,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cert-manager-operator.v1.18.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{33554432 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:tmp,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gjxnl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:*false,SELinuxOptions:nil,RunAsUser:*1000680000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cert-manager-operator-controller-manager-5446d6888b-z6kfl_cert-manager-operator(917ff1a2-c53d-459e-ab3f-7a5c67a7ba88): 
ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \"/var/tmp/container_images_storage1403159883/1\": happened during read: context canceled" logger="UnhandledError" Dec 08 00:20:31 crc kubenswrapper[4745]: E1208 00:20:31.144855 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cert-manager-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \\\"/var/tmp/container_images_storage1403159883/1\\\": happened during read: context canceled\"" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-z6kfl" podUID="917ff1a2-c53d-459e-ab3f-7a5c67a7ba88" Dec 08 00:20:31 crc kubenswrapper[4745]: E1208 00:20:31.525643 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cert-manager-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cert-manager/cert-manager-operator-rhel9@sha256:fa8de363ab4435c1085ac37f1bad488828c6ae8ba361c5f865c27ef577610911\\\"\"" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-z6kfl" podUID="917ff1a2-c53d-459e-ab3f-7a5c67a7ba88" Dec 08 00:20:31 crc kubenswrapper[4745]: I1208 00:20:31.892536 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.033193 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4q24\" (UniqueName: \"kubernetes.io/projected/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-kube-api-access-v4q24\") pod \"5a92aad8-3ca0-4f04-8a46-1feb10039d1a\" (UID: \"5a92aad8-3ca0-4f04-8a46-1feb10039d1a\") " Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.033268 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-utilities\") pod \"5a92aad8-3ca0-4f04-8a46-1feb10039d1a\" (UID: \"5a92aad8-3ca0-4f04-8a46-1feb10039d1a\") " Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.033360 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-catalog-content\") pod \"5a92aad8-3ca0-4f04-8a46-1feb10039d1a\" (UID: \"5a92aad8-3ca0-4f04-8a46-1feb10039d1a\") " Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.037271 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-utilities" (OuterVolumeSpecName: "utilities") pod "5a92aad8-3ca0-4f04-8a46-1feb10039d1a" (UID: "5a92aad8-3ca0-4f04-8a46-1feb10039d1a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.047130 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-kube-api-access-v4q24" (OuterVolumeSpecName: "kube-api-access-v4q24") pod "5a92aad8-3ca0-4f04-8a46-1feb10039d1a" (UID: "5a92aad8-3ca0-4f04-8a46-1feb10039d1a"). InnerVolumeSpecName "kube-api-access-v4q24". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:20:32 crc kubenswrapper[4745]: E1208 00:20:32.095667 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="registry.connect.redhat.com/elastic/elasticsearch:7.17.20" Dec 08 00:20:32 crc kubenswrapper[4745]: E1208 00:20:32.096170 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:elastic-internal-init-filesystem,Image:registry.connect.redhat.com/elastic/elasticsearch:7.17.20,Command:[bash -c /mnt/elastic-internal/scripts/prepare-fs.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:NODE_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:spec.nodeName,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:HEADLESS_SERVICE_NAME,Value:elasticsearch-es-default,ValueFrom:nil,},EnvVar{Name:PROBE_PASSWORD_PATH,Value:/mnt/elastic-internal/pod-mounted-users/elastic-internal-probe,ValueFrom:nil,},EnvVar{Name:PROBE_USERNAME,Value:elastic-internal-probe,ValueFrom:nil,},EnvVar{Name:READINESS_PROBE_PROTOCOL,Value:https,ValueFrom:nil,},EnvVar{Name:NSS_SDB_USE_CACHE,Value:no,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:downward-api,ReadOnly:true,MountPath:/mnt/elastic-internal/downward-api,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-bin-local,ReadOnly:false,MountPath:/mnt/elastic-internal/elasticsearch-bin-local,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-config,ReadOnly:true,MountPath:/mnt/elastic-internal/elasticsearch-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-config-local,ReadOnly:false,MountPath:/mnt/elastic-internal/elasticsearch-config-local,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-plugins-local,ReadOnly:false,MountPath:/mnt/elastic-internal/elasticsearch-plugins-local,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-http-certificates,ReadOnly:true,MountPath:/usr/share/elasticsearch/config/http-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-probe-user,ReadOnly:true,MountPath:/mnt/elastic-internal/pod-mounted-users,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-remote-certificate-authorities,ReadOnly:true,MountPath:/usr/share/elasticsearch/config/transport-remote-certs/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-scripts,ReadOnly:true,MountPath:/mnt/elastic-internal/scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-transport-certificates,ReadOnly:true,MountPath:/mnt/elastic-internal/transport-certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-unicast-hosts,ReadOnly:true,MountPath:/mnt/elastic-internal/unicast-hosts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-xpack-file-realm,ReadOnly:true,MountPath:/mnt/elastic-internal/xpack-file-realm,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elasticsearch-data,ReadOnly:false,MountPath:/usr/share/elasticsearch/data,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elasticsearch-logs,ReadOnly:false,MountPath:/usr/share/elasticsearch/logs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tmp-volume,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:*false,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod elasticsearch-es-default-0_service-telemetry(c18b8080-8e69-4234-9e9d-7ec8cacb62a3): ErrImagePull: rpc error: code = Canceled desc = copying config: context 
canceled" logger="UnhandledError" Dec 08 00:20:32 crc kubenswrapper[4745]: E1208 00:20:32.097587 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="c18b8080-8e69-4234-9e9d-7ec8cacb62a3" Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.134789 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.134826 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4q24\" (UniqueName: \"kubernetes.io/projected/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-kube-api-access-v4q24\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.163088 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5a92aad8-3ca0-4f04-8a46-1feb10039d1a" (UID: "5a92aad8-3ca0-4f04-8a46-1feb10039d1a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.236441 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a92aad8-3ca0-4f04-8a46-1feb10039d1a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.530323 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cqh4k" Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.530318 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cqh4k" event={"ID":"5a92aad8-3ca0-4f04-8a46-1feb10039d1a","Type":"ContainerDied","Data":"d807a4a27efb79845d7d0553ebd55d1845ed5ea2312e84862ef835597abde347"} Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.530470 4745 scope.go:117] "RemoveContainer" containerID="f186ccaa9b9643de5832fa89e953302bcc74956c94ee14a9a1be5b2819cedc61" Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.534273 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerStarted","Data":"a0b54804879b59a5315813b6e61fe2985b6017fa236833a33d571f68aadbd8c5"} Dec 08 00:20:32 crc kubenswrapper[4745]: E1208 00:20:32.536795 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/elasticsearch:7.17.20\\\"\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="c18b8080-8e69-4234-9e9d-7ec8cacb62a3" Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.559828 4745 scope.go:117] "RemoveContainer" containerID="f0325e8fd83e18c96da3f99e4724b38b465314071ab6257a0a0f1ac6e49524cc" Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.596886 4745 scope.go:117] "RemoveContainer" containerID="a170dec209793afacbf8fd8d4e7cfbfa0a4ae9d5df8fc2291c6006c168efeabd" Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.607897 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cqh4k"] Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.611663 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cqh4k"] Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.672163 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.700850 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Dec 08 00:20:32 crc kubenswrapper[4745]: I1208 00:20:32.890196 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a92aad8-3ca0-4f04-8a46-1feb10039d1a" path="/var/lib/kubelet/pods/5a92aad8-3ca0-4f04-8a46-1feb10039d1a/volumes" Dec 08 00:20:33 crc kubenswrapper[4745]: E1208 00:20:33.547051 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/elasticsearch:7.17.20\\\"\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="c18b8080-8e69-4234-9e9d-7ec8cacb62a3" Dec 08 00:20:34 crc kubenswrapper[4745]: E1208 00:20:34.552180 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/elasticsearch:7.17.20\\\"\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="c18b8080-8e69-4234-9e9d-7ec8cacb62a3" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.625375 4745 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["service-telemetry/service-telemetry-operator-1-build"] Dec 08 00:20:34 crc kubenswrapper[4745]: E1208 00:20:34.625600 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a92aad8-3ca0-4f04-8a46-1feb10039d1a" containerName="registry-server" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.625614 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a92aad8-3ca0-4f04-8a46-1feb10039d1a" containerName="registry-server" Dec 08 00:20:34 crc kubenswrapper[4745]: E1208 00:20:34.625626 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a92aad8-3ca0-4f04-8a46-1feb10039d1a" containerName="extract-content" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.625633 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a92aad8-3ca0-4f04-8a46-1feb10039d1a" containerName="extract-content" Dec 08 00:20:34 crc kubenswrapper[4745]: E1208 00:20:34.625648 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a92aad8-3ca0-4f04-8a46-1feb10039d1a" containerName="extract-utilities" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.625654 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a92aad8-3ca0-4f04-8a46-1feb10039d1a" containerName="extract-utilities" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.625746 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a92aad8-3ca0-4f04-8a46-1feb10039d1a" containerName="registry-server" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.626463 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.628188 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-1-sys-config" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.630777 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-vzj4m" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.631009 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-1-ca" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.631156 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-1-global-ca" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.653337 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.771359 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-proxy-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.771732 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.771906 4745 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/8e0d674f-6a99-480f-9c83-a5f1edd0615e-buildcachedir\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.772134 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-blob-cache\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.772301 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-container-storage-root\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.772491 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/8e0d674f-6a99-480f-9c83-a5f1edd0615e-builder-dockercfg-vzj4m-pull\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.772704 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-buildworkdir\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.772952 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/8e0d674f-6a99-480f-9c83-a5f1edd0615e-builder-dockercfg-vzj4m-push\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.773154 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5gg8\" (UniqueName: \"kubernetes.io/projected/8e0d674f-6a99-480f-9c83-a5f1edd0615e-kube-api-access-d5gg8\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.773333 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-container-storage-run\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.773471 4745 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8e0d674f-6a99-480f-9c83-a5f1edd0615e-node-pullsecrets\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.773607 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-system-configs\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.874683 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/8e0d674f-6a99-480f-9c83-a5f1edd0615e-builder-dockercfg-vzj4m-pull\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.874788 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-buildworkdir\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.874869 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/8e0d674f-6a99-480f-9c83-a5f1edd0615e-builder-dockercfg-vzj4m-push\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.874913 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5gg8\" (UniqueName: \"kubernetes.io/projected/8e0d674f-6a99-480f-9c83-a5f1edd0615e-kube-api-access-d5gg8\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.874998 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-container-storage-run\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.875038 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-system-configs\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.875083 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: 
\"kubernetes.io/host-path/8e0d674f-6a99-480f-9c83-a5f1edd0615e-node-pullsecrets\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.875133 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-proxy-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.875186 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.875236 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/8e0d674f-6a99-480f-9c83-a5f1edd0615e-buildcachedir\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.875319 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-blob-cache\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.875386 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-container-storage-root\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.876201 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-container-storage-root\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.876286 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/8e0d674f-6a99-480f-9c83-a5f1edd0615e-buildcachedir\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.876511 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-system-configs\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 
00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.876584 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8e0d674f-6a99-480f-9c83-a5f1edd0615e-node-pullsecrets\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.876844 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-blob-cache\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.876862 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-buildworkdir\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.877121 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-container-storage-run\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.877203 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-proxy-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.878491 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.884527 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/8e0d674f-6a99-480f-9c83-a5f1edd0615e-builder-dockercfg-vzj4m-push\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.888370 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/8e0d674f-6a99-480f-9c83-a5f1edd0615e-builder-dockercfg-vzj4m-pull\") pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.894591 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5gg8\" (UniqueName: \"kubernetes.io/projected/8e0d674f-6a99-480f-9c83-a5f1edd0615e-kube-api-access-d5gg8\") 
pod \"service-telemetry-operator-1-build\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:34 crc kubenswrapper[4745]: I1208 00:20:34.953494 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:35 crc kubenswrapper[4745]: I1208 00:20:35.399446 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Dec 08 00:20:35 crc kubenswrapper[4745]: I1208 00:20:35.556943 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"8e0d674f-6a99-480f-9c83-a5f1edd0615e","Type":"ContainerStarted","Data":"663f02a7f84bf8701d288ccf82b0be5378a3a3f5bf21f5cd96ee862935276f98"} Dec 08 00:20:40 crc kubenswrapper[4745]: I1208 00:20:40.586025 4745 generic.go:334] "Generic (PLEG): container finished" podID="8e0d674f-6a99-480f-9c83-a5f1edd0615e" containerID="b0d18327ee20afe254ec4defa11186b65001ba999004f576c765661c9bd84103" exitCode=0 Dec 08 00:20:40 crc kubenswrapper[4745]: I1208 00:20:40.586149 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"8e0d674f-6a99-480f-9c83-a5f1edd0615e","Type":"ContainerDied","Data":"b0d18327ee20afe254ec4defa11186b65001ba999004f576c765661c9bd84103"} Dec 08 00:20:41 crc kubenswrapper[4745]: I1208 00:20:41.593328 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"8e0d674f-6a99-480f-9c83-a5f1edd0615e","Type":"ContainerStarted","Data":"9343a0c4d4e82363efaa44745b2c728180b2ad2fc5946d1b6471a03400171368"} Dec 08 00:20:41 crc kubenswrapper[4745]: I1208 00:20:41.621046 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-1-build" podStartSLOduration=3.098069705 podStartE2EDuration="7.621020777s" podCreationTimestamp="2025-12-08 00:20:34 +0000 UTC" firstStartedPulling="2025-12-08 00:20:35.406867007 +0000 UTC m=+790.836073317" lastFinishedPulling="2025-12-08 00:20:39.929818079 +0000 UTC m=+795.359024389" observedRunningTime="2025-12-08 00:20:41.616826983 +0000 UTC m=+797.046033323" watchObservedRunningTime="2025-12-08 00:20:41.621020777 +0000 UTC m=+797.050227117" Dec 08 00:20:44 crc kubenswrapper[4745]: I1208 00:20:44.800442 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Dec 08 00:20:44 crc kubenswrapper[4745]: I1208 00:20:44.801222 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-operator-1-build" podUID="8e0d674f-6a99-480f-9c83-a5f1edd0615e" containerName="docker-build" containerID="cri-o://9343a0c4d4e82363efaa44745b2c728180b2ad2fc5946d1b6471a03400171368" gracePeriod=30 Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.429756 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.431900 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.433582 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-2-sys-config" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.433974 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-2-ca" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.434198 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-2-global-ca" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.451980 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.464367 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-proxy-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.464448 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/f978daae-3f37-4b63-8fd8-f3bad5802243-builder-dockercfg-vzj4m-pull\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.464527 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-system-configs\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.464582 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-container-storage-root\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.464610 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-build-blob-cache\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.464664 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.464689 4745 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/f978daae-3f37-4b63-8fd8-f3bad5802243-buildcachedir\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.464767 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-buildworkdir\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.464880 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwtp8\" (UniqueName: \"kubernetes.io/projected/f978daae-3f37-4b63-8fd8-f3bad5802243-kube-api-access-jwtp8\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.464947 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/f978daae-3f37-4b63-8fd8-f3bad5802243-builder-dockercfg-vzj4m-push\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.464974 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-container-storage-run\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.464999 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f978daae-3f37-4b63-8fd8-f3bad5802243-node-pullsecrets\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.566046 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/f978daae-3f37-4b63-8fd8-f3bad5802243-builder-dockercfg-vzj4m-push\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.566106 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-container-storage-run\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.566131 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f978daae-3f37-4b63-8fd8-f3bad5802243-node-pullsecrets\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.566161 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-proxy-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.566185 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/f978daae-3f37-4b63-8fd8-f3bad5802243-builder-dockercfg-vzj4m-pull\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.566219 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-system-configs\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.566241 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-container-storage-root\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.566263 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-build-blob-cache\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.566284 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.566300 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/f978daae-3f37-4b63-8fd8-f3bad5802243-buildcachedir\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.566327 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-buildworkdir\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " 
pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.566349 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f978daae-3f37-4b63-8fd8-f3bad5802243-node-pullsecrets\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.566357 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwtp8\" (UniqueName: \"kubernetes.io/projected/f978daae-3f37-4b63-8fd8-f3bad5802243-kube-api-access-jwtp8\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.566634 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-container-storage-run\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.566710 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/f978daae-3f37-4b63-8fd8-f3bad5802243-buildcachedir\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.567430 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-proxy-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.567679 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.567779 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-build-blob-cache\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.568080 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-container-storage-root\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.568079 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: 
\"kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-system-configs\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.568512 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-buildworkdir\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.574389 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/f978daae-3f37-4b63-8fd8-f3bad5802243-builder-dockercfg-vzj4m-push\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.579472 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/f978daae-3f37-4b63-8fd8-f3bad5802243-builder-dockercfg-vzj4m-pull\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.583434 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwtp8\" (UniqueName: \"kubernetes.io/projected/f978daae-3f37-4b63-8fd8-f3bad5802243-kube-api-access-jwtp8\") pod \"service-telemetry-operator-2-build\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:46 crc kubenswrapper[4745]: I1208 00:20:46.749771 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.226146 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Dec 08 00:20:47 crc kubenswrapper[4745]: W1208 00:20:47.229355 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf978daae_3f37_4b63_8fd8_f3bad5802243.slice/crio-0d9782421d7bdec7cba8a57257de147ae6d2fbfab6f43134e340870b6b54edab WatchSource:0}: Error finding container 0d9782421d7bdec7cba8a57257de147ae6d2fbfab6f43134e340870b6b54edab: Status 404 returned error can't find the container with id 0d9782421d7bdec7cba8a57257de147ae6d2fbfab6f43134e340870b6b54edab Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.631447 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-1-build_8e0d674f-6a99-480f-9c83-a5f1edd0615e/docker-build/0.log" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.634075 4745 generic.go:334] "Generic (PLEG): container finished" podID="8e0d674f-6a99-480f-9c83-a5f1edd0615e" containerID="9343a0c4d4e82363efaa44745b2c728180b2ad2fc5946d1b6471a03400171368" exitCode=1 Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.634127 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"8e0d674f-6a99-480f-9c83-a5f1edd0615e","Type":"ContainerDied","Data":"9343a0c4d4e82363efaa44745b2c728180b2ad2fc5946d1b6471a03400171368"} Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.636376 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"f978daae-3f37-4b63-8fd8-f3bad5802243","Type":"ContainerStarted","Data":"58d2a2ed710256b3e4f6cb06fa31ccf92e0783c1c5f6fb49ea9d1d6814ec5211"} Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.636412 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"f978daae-3f37-4b63-8fd8-f3bad5802243","Type":"ContainerStarted","Data":"0d9782421d7bdec7cba8a57257de147ae6d2fbfab6f43134e340870b6b54edab"} Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.892721 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-1-build_8e0d674f-6a99-480f-9c83-a5f1edd0615e/docker-build/0.log" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.893232 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.988049 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-ca-bundles\") pod \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.989034 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-container-storage-root\") pod \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.989067 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5gg8\" (UniqueName: \"kubernetes.io/projected/8e0d674f-6a99-480f-9c83-a5f1edd0615e-kube-api-access-d5gg8\") pod \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.989121 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-container-storage-run\") pod \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.989150 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-proxy-ca-bundles\") pod \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.989172 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-buildworkdir\") pod \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.989196 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-blob-cache\") pod \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.989253 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/8e0d674f-6a99-480f-9c83-a5f1edd0615e-builder-dockercfg-vzj4m-pull\") pod \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.989289 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8e0d674f-6a99-480f-9c83-a5f1edd0615e-node-pullsecrets\") pod \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.989314 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: 
\"kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-system-configs\") pod \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.989337 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/8e0d674f-6a99-480f-9c83-a5f1edd0615e-builder-dockercfg-vzj4m-push\") pod \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.989362 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/8e0d674f-6a99-480f-9c83-a5f1edd0615e-buildcachedir\") pod \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\" (UID: \"8e0d674f-6a99-480f-9c83-a5f1edd0615e\") " Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.988992 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "8e0d674f-6a99-480f-9c83-a5f1edd0615e" (UID: "8e0d674f-6a99-480f-9c83-a5f1edd0615e"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.989640 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "8e0d674f-6a99-480f-9c83-a5f1edd0615e" (UID: "8e0d674f-6a99-480f-9c83-a5f1edd0615e"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.990033 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "8e0d674f-6a99-480f-9c83-a5f1edd0615e" (UID: "8e0d674f-6a99-480f-9c83-a5f1edd0615e"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.990107 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8e0d674f-6a99-480f-9c83-a5f1edd0615e-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "8e0d674f-6a99-480f-9c83-a5f1edd0615e" (UID: "8e0d674f-6a99-480f-9c83-a5f1edd0615e"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.990216 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "8e0d674f-6a99-480f-9c83-a5f1edd0615e" (UID: "8e0d674f-6a99-480f-9c83-a5f1edd0615e"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.990204 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8e0d674f-6a99-480f-9c83-a5f1edd0615e-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "8e0d674f-6a99-480f-9c83-a5f1edd0615e" (UID: "8e0d674f-6a99-480f-9c83-a5f1edd0615e"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.990281 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "8e0d674f-6a99-480f-9c83-a5f1edd0615e" (UID: "8e0d674f-6a99-480f-9c83-a5f1edd0615e"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.990532 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "8e0d674f-6a99-480f-9c83-a5f1edd0615e" (UID: "8e0d674f-6a99-480f-9c83-a5f1edd0615e"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.990570 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "8e0d674f-6a99-480f-9c83-a5f1edd0615e" (UID: "8e0d674f-6a99-480f-9c83-a5f1edd0615e"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.990587 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-container-storage-run\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.990602 4745 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.990612 4745 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-buildworkdir\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.990621 4745 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-blob-cache\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.990630 4745 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8e0d674f-6a99-480f-9c83-a5f1edd0615e-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.990638 4745 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/8e0d674f-6a99-480f-9c83-a5f1edd0615e-buildcachedir\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.990647 4745 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.995060 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/8e0d674f-6a99-480f-9c83-a5f1edd0615e-kube-api-access-d5gg8" (OuterVolumeSpecName: "kube-api-access-d5gg8") pod "8e0d674f-6a99-480f-9c83-a5f1edd0615e" (UID: "8e0d674f-6a99-480f-9c83-a5f1edd0615e"). InnerVolumeSpecName "kube-api-access-d5gg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:20:47 crc kubenswrapper[4745]: I1208 00:20:47.995170 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e0d674f-6a99-480f-9c83-a5f1edd0615e-builder-dockercfg-vzj4m-pull" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-pull") pod "8e0d674f-6a99-480f-9c83-a5f1edd0615e" (UID: "8e0d674f-6a99-480f-9c83-a5f1edd0615e"). InnerVolumeSpecName "builder-dockercfg-vzj4m-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:20:48 crc kubenswrapper[4745]: I1208 00:20:48.008306 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e0d674f-6a99-480f-9c83-a5f1edd0615e-builder-dockercfg-vzj4m-push" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-push") pod "8e0d674f-6a99-480f-9c83-a5f1edd0615e" (UID: "8e0d674f-6a99-480f-9c83-a5f1edd0615e"). InnerVolumeSpecName "builder-dockercfg-vzj4m-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:20:48 crc kubenswrapper[4745]: I1208 00:20:48.091382 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/8e0d674f-6a99-480f-9c83-a5f1edd0615e-builder-dockercfg-vzj4m-pull\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:48 crc kubenswrapper[4745]: I1208 00:20:48.091415 4745 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/8e0d674f-6a99-480f-9c83-a5f1edd0615e-build-system-configs\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:48 crc kubenswrapper[4745]: I1208 00:20:48.091423 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/8e0d674f-6a99-480f-9c83-a5f1edd0615e-builder-dockercfg-vzj4m-push\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:48 crc kubenswrapper[4745]: I1208 00:20:48.091431 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/8e0d674f-6a99-480f-9c83-a5f1edd0615e-container-storage-root\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:48 crc kubenswrapper[4745]: I1208 00:20:48.091440 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5gg8\" (UniqueName: \"kubernetes.io/projected/8e0d674f-6a99-480f-9c83-a5f1edd0615e-kube-api-access-d5gg8\") on node \"crc\" DevicePath \"\"" Dec 08 00:20:48 crc kubenswrapper[4745]: I1208 00:20:48.645551 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"c18b8080-8e69-4234-9e9d-7ec8cacb62a3","Type":"ContainerStarted","Data":"b9327a35cd5a6e91c183a12fec1d04f20ca98b6ea37a6e0e3e71c9e47c9252e0"} Dec 08 00:20:48 crc kubenswrapper[4745]: I1208 00:20:48.647304 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-1-build_8e0d674f-6a99-480f-9c83-a5f1edd0615e/docker-build/0.log" Dec 08 00:20:48 crc kubenswrapper[4745]: I1208 00:20:48.647953 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" 
event={"ID":"8e0d674f-6a99-480f-9c83-a5f1edd0615e","Type":"ContainerDied","Data":"663f02a7f84bf8701d288ccf82b0be5378a3a3f5bf21f5cd96ee862935276f98"} Dec 08 00:20:48 crc kubenswrapper[4745]: I1208 00:20:48.648017 4745 scope.go:117] "RemoveContainer" containerID="9343a0c4d4e82363efaa44745b2c728180b2ad2fc5946d1b6471a03400171368" Dec 08 00:20:48 crc kubenswrapper[4745]: I1208 00:20:48.648332 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Dec 08 00:20:48 crc kubenswrapper[4745]: I1208 00:20:48.699419 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Dec 08 00:20:48 crc kubenswrapper[4745]: I1208 00:20:48.704477 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Dec 08 00:20:48 crc kubenswrapper[4745]: I1208 00:20:48.893914 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e0d674f-6a99-480f-9c83-a5f1edd0615e" path="/var/lib/kubelet/pods/8e0d674f-6a99-480f-9c83-a5f1edd0615e/volumes" Dec 08 00:20:49 crc kubenswrapper[4745]: I1208 00:20:49.330407 4745 scope.go:117] "RemoveContainer" containerID="b0d18327ee20afe254ec4defa11186b65001ba999004f576c765661c9bd84103" Dec 08 00:20:49 crc kubenswrapper[4745]: I1208 00:20:49.653709 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-z6kfl" event={"ID":"917ff1a2-c53d-459e-ab3f-7a5c67a7ba88","Type":"ContainerStarted","Data":"5feab6a577e22a491a2bd6b1fff73791fe3f1ba941fe448d687e1baeba0f3c0e"} Dec 08 00:20:49 crc kubenswrapper[4745]: I1208 00:20:49.673712 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-z6kfl" podStartSLOduration=1.439779139 podStartE2EDuration="32.673694681s" podCreationTimestamp="2025-12-08 00:20:17 +0000 UTC" firstStartedPulling="2025-12-08 00:20:18.168459124 +0000 UTC m=+773.597665424" lastFinishedPulling="2025-12-08 00:20:49.402374666 +0000 UTC m=+804.831580966" observedRunningTime="2025-12-08 00:20:49.672821677 +0000 UTC m=+805.102027967" watchObservedRunningTime="2025-12-08 00:20:49.673694681 +0000 UTC m=+805.102900981" Dec 08 00:20:50 crc kubenswrapper[4745]: I1208 00:20:50.662715 4745 generic.go:334] "Generic (PLEG): container finished" podID="c18b8080-8e69-4234-9e9d-7ec8cacb62a3" containerID="b9327a35cd5a6e91c183a12fec1d04f20ca98b6ea37a6e0e3e71c9e47c9252e0" exitCode=0 Dec 08 00:20:50 crc kubenswrapper[4745]: I1208 00:20:50.662942 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"c18b8080-8e69-4234-9e9d-7ec8cacb62a3","Type":"ContainerDied","Data":"b9327a35cd5a6e91c183a12fec1d04f20ca98b6ea37a6e0e3e71c9e47c9252e0"} Dec 08 00:20:51 crc kubenswrapper[4745]: I1208 00:20:51.681129 4745 generic.go:334] "Generic (PLEG): container finished" podID="c18b8080-8e69-4234-9e9d-7ec8cacb62a3" containerID="bd20fb77b614ad33f5aaae5e82206291b567f8195c27051b6fa6e6509c6758f7" exitCode=0 Dec 08 00:20:51 crc kubenswrapper[4745]: I1208 00:20:51.681176 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"c18b8080-8e69-4234-9e9d-7ec8cacb62a3","Type":"ContainerDied","Data":"bd20fb77b614ad33f5aaae5e82206291b567f8195c27051b6fa6e6509c6758f7"} Dec 08 00:20:52 crc kubenswrapper[4745]: I1208 00:20:52.696502 4745 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"c18b8080-8e69-4234-9e9d-7ec8cacb62a3","Type":"ContainerStarted","Data":"280f40018882aa3d3a0d21cd6208003a7bafe3c6f0222604534532b3b6d00a2d"} Dec 08 00:20:52 crc kubenswrapper[4745]: I1208 00:20:52.697076 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:20:52 crc kubenswrapper[4745]: I1208 00:20:52.736365 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elasticsearch-es-default-0" podStartSLOduration=9.417297203 podStartE2EDuration="49.73634468s" podCreationTimestamp="2025-12-08 00:20:03 +0000 UTC" firstStartedPulling="2025-12-08 00:20:08.072769793 +0000 UTC m=+763.501976093" lastFinishedPulling="2025-12-08 00:20:48.39181727 +0000 UTC m=+803.821023570" observedRunningTime="2025-12-08 00:20:52.730536852 +0000 UTC m=+808.159743152" watchObservedRunningTime="2025-12-08 00:20:52.73634468 +0000 UTC m=+808.165550980" Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.183533 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-p4kck"] Dec 08 00:20:53 crc kubenswrapper[4745]: E1208 00:20:53.183846 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e0d674f-6a99-480f-9c83-a5f1edd0615e" containerName="docker-build" Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.183871 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e0d674f-6a99-480f-9c83-a5f1edd0615e" containerName="docker-build" Dec 08 00:20:53 crc kubenswrapper[4745]: E1208 00:20:53.183886 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e0d674f-6a99-480f-9c83-a5f1edd0615e" containerName="manage-dockerfile" Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.183897 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e0d674f-6a99-480f-9c83-a5f1edd0615e" containerName="manage-dockerfile" Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.184055 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e0d674f-6a99-480f-9c83-a5f1edd0615e" containerName="docker-build" Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.184608 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-p4kck" Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.189150 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.189272 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.190685 4745 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-kmzkm" Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.194501 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-p4kck"] Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.357800 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c493caf0-3e42-4c34-8574-a46e6fa2db3f-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-p4kck\" (UID: \"c493caf0-3e42-4c34-8574-a46e6fa2db3f\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-p4kck" Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.358195 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5b56r\" (UniqueName: \"kubernetes.io/projected/c493caf0-3e42-4c34-8574-a46e6fa2db3f-kube-api-access-5b56r\") pod \"cert-manager-webhook-f4fb5df64-p4kck\" (UID: \"c493caf0-3e42-4c34-8574-a46e6fa2db3f\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-p4kck" Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.458877 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c493caf0-3e42-4c34-8574-a46e6fa2db3f-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-p4kck\" (UID: \"c493caf0-3e42-4c34-8574-a46e6fa2db3f\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-p4kck" Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.459032 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5b56r\" (UniqueName: \"kubernetes.io/projected/c493caf0-3e42-4c34-8574-a46e6fa2db3f-kube-api-access-5b56r\") pod \"cert-manager-webhook-f4fb5df64-p4kck\" (UID: \"c493caf0-3e42-4c34-8574-a46e6fa2db3f\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-p4kck" Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.484247 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c493caf0-3e42-4c34-8574-a46e6fa2db3f-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-p4kck\" (UID: \"c493caf0-3e42-4c34-8574-a46e6fa2db3f\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-p4kck" Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.496280 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5b56r\" (UniqueName: \"kubernetes.io/projected/c493caf0-3e42-4c34-8574-a46e6fa2db3f-kube-api-access-5b56r\") pod \"cert-manager-webhook-f4fb5df64-p4kck\" (UID: \"c493caf0-3e42-4c34-8574-a46e6fa2db3f\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-p4kck" Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.503954 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-p4kck" Dec 08 00:20:53 crc kubenswrapper[4745]: I1208 00:20:53.849827 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-p4kck"] Dec 08 00:20:54 crc kubenswrapper[4745]: I1208 00:20:54.717273 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-p4kck" event={"ID":"c493caf0-3e42-4c34-8574-a46e6fa2db3f","Type":"ContainerStarted","Data":"7e371846dd49fcd2e1b7423283633811e98e828cf9c7ce9cb196c8c1f04c8f20"} Dec 08 00:20:55 crc kubenswrapper[4745]: I1208 00:20:55.469835 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-zn2dd"] Dec 08 00:20:55 crc kubenswrapper[4745]: I1208 00:20:55.471057 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-zn2dd" Dec 08 00:20:55 crc kubenswrapper[4745]: I1208 00:20:55.478563 4745 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-vcv6l" Dec 08 00:20:55 crc kubenswrapper[4745]: I1208 00:20:55.484160 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-zn2dd"] Dec 08 00:20:55 crc kubenswrapper[4745]: I1208 00:20:55.585179 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rwdw\" (UniqueName: \"kubernetes.io/projected/589ab375-83a9-43be-a945-92245f00f756-kube-api-access-2rwdw\") pod \"cert-manager-cainjector-855d9ccff4-zn2dd\" (UID: \"589ab375-83a9-43be-a945-92245f00f756\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-zn2dd" Dec 08 00:20:55 crc kubenswrapper[4745]: I1208 00:20:55.585239 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/589ab375-83a9-43be-a945-92245f00f756-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-zn2dd\" (UID: \"589ab375-83a9-43be-a945-92245f00f756\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-zn2dd" Dec 08 00:20:55 crc kubenswrapper[4745]: I1208 00:20:55.686850 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rwdw\" (UniqueName: \"kubernetes.io/projected/589ab375-83a9-43be-a945-92245f00f756-kube-api-access-2rwdw\") pod \"cert-manager-cainjector-855d9ccff4-zn2dd\" (UID: \"589ab375-83a9-43be-a945-92245f00f756\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-zn2dd" Dec 08 00:20:55 crc kubenswrapper[4745]: I1208 00:20:55.687288 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/589ab375-83a9-43be-a945-92245f00f756-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-zn2dd\" (UID: \"589ab375-83a9-43be-a945-92245f00f756\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-zn2dd" Dec 08 00:20:55 crc kubenswrapper[4745]: I1208 00:20:55.707130 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/589ab375-83a9-43be-a945-92245f00f756-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-zn2dd\" (UID: \"589ab375-83a9-43be-a945-92245f00f756\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-zn2dd" Dec 08 00:20:55 crc kubenswrapper[4745]: I1208 00:20:55.724301 4745 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rwdw\" (UniqueName: \"kubernetes.io/projected/589ab375-83a9-43be-a945-92245f00f756-kube-api-access-2rwdw\") pod \"cert-manager-cainjector-855d9ccff4-zn2dd\" (UID: \"589ab375-83a9-43be-a945-92245f00f756\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-zn2dd" Dec 08 00:20:55 crc kubenswrapper[4745]: I1208 00:20:55.793005 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-zn2dd" Dec 08 00:20:56 crc kubenswrapper[4745]: I1208 00:20:56.093009 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-zn2dd"] Dec 08 00:20:56 crc kubenswrapper[4745]: E1208 00:20:56.128685 4745 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.201:60498->38.102.83.201:39277: write tcp 38.102.83.201:60498->38.102.83.201:39277: write: broken pipe Dec 08 00:20:56 crc kubenswrapper[4745]: I1208 00:20:56.733190 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-zn2dd" event={"ID":"589ab375-83a9-43be-a945-92245f00f756","Type":"ContainerStarted","Data":"d07a13645d3bea5be596247c4ef59513e78cf2868989e5f8a891cae7c2196a25"} Dec 08 00:20:56 crc kubenswrapper[4745]: I1208 00:20:56.735563 4745 generic.go:334] "Generic (PLEG): container finished" podID="f978daae-3f37-4b63-8fd8-f3bad5802243" containerID="58d2a2ed710256b3e4f6cb06fa31ccf92e0783c1c5f6fb49ea9d1d6814ec5211" exitCode=0 Dec 08 00:20:56 crc kubenswrapper[4745]: I1208 00:20:56.735594 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"f978daae-3f37-4b63-8fd8-f3bad5802243","Type":"ContainerDied","Data":"58d2a2ed710256b3e4f6cb06fa31ccf92e0783c1c5f6fb49ea9d1d6814ec5211"} Dec 08 00:20:57 crc kubenswrapper[4745]: I1208 00:20:57.746335 4745 generic.go:334] "Generic (PLEG): container finished" podID="f978daae-3f37-4b63-8fd8-f3bad5802243" containerID="e0e03ab4abbc8ee5040d708abacb8e712c5a77b878ae7f7493459c9c05d5cdde" exitCode=0 Dec 08 00:20:57 crc kubenswrapper[4745]: I1208 00:20:57.746420 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"f978daae-3f37-4b63-8fd8-f3bad5802243","Type":"ContainerDied","Data":"e0e03ab4abbc8ee5040d708abacb8e712c5a77b878ae7f7493459c9c05d5cdde"} Dec 08 00:20:57 crc kubenswrapper[4745]: I1208 00:20:57.781522 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-2-build_f978daae-3f37-4b63-8fd8-f3bad5802243/manage-dockerfile/0.log" Dec 08 00:20:58 crc kubenswrapper[4745]: I1208 00:20:58.767359 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"f978daae-3f37-4b63-8fd8-f3bad5802243","Type":"ContainerStarted","Data":"1c3af78c3fed3a8434dd5364b526b9ee43d98d7cff6783a5072969c50c4cec66"} Dec 08 00:20:58 crc kubenswrapper[4745]: I1208 00:20:58.800700 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-2-build" podStartSLOduration=12.800676997 podStartE2EDuration="12.800676997s" podCreationTimestamp="2025-12-08 00:20:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:20:58.79747358 +0000 UTC m=+814.226679880" 
watchObservedRunningTime="2025-12-08 00:20:58.800676997 +0000 UTC m=+814.229883337" Dec 08 00:21:03 crc kubenswrapper[4745]: I1208 00:21:03.845203 4745 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="c18b8080-8e69-4234-9e9d-7ec8cacb62a3" containerName="elasticsearch" probeResult="failure" output=< Dec 08 00:21:03 crc kubenswrapper[4745]: {"timestamp": "2025-12-08T00:21:03+00:00", "message": "readiness probe failed", "curl_rc": "7"} Dec 08 00:21:03 crc kubenswrapper[4745]: > Dec 08 00:21:07 crc kubenswrapper[4745]: I1208 00:21:07.187818 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-qhxds"] Dec 08 00:21:07 crc kubenswrapper[4745]: I1208 00:21:07.189630 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-qhxds" Dec 08 00:21:07 crc kubenswrapper[4745]: I1208 00:21:07.198512 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-qhxds"] Dec 08 00:21:07 crc kubenswrapper[4745]: I1208 00:21:07.248958 4745 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-wq276" Dec 08 00:21:07 crc kubenswrapper[4745]: I1208 00:21:07.263289 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/efcc61c0-a81a-405d-b902-029423222c50-bound-sa-token\") pod \"cert-manager-86cb77c54b-qhxds\" (UID: \"efcc61c0-a81a-405d-b902-029423222c50\") " pod="cert-manager/cert-manager-86cb77c54b-qhxds" Dec 08 00:21:07 crc kubenswrapper[4745]: I1208 00:21:07.263397 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcmrn\" (UniqueName: \"kubernetes.io/projected/efcc61c0-a81a-405d-b902-029423222c50-kube-api-access-jcmrn\") pod \"cert-manager-86cb77c54b-qhxds\" (UID: \"efcc61c0-a81a-405d-b902-029423222c50\") " pod="cert-manager/cert-manager-86cb77c54b-qhxds" Dec 08 00:21:07 crc kubenswrapper[4745]: I1208 00:21:07.364796 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/efcc61c0-a81a-405d-b902-029423222c50-bound-sa-token\") pod \"cert-manager-86cb77c54b-qhxds\" (UID: \"efcc61c0-a81a-405d-b902-029423222c50\") " pod="cert-manager/cert-manager-86cb77c54b-qhxds" Dec 08 00:21:07 crc kubenswrapper[4745]: I1208 00:21:07.364918 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcmrn\" (UniqueName: \"kubernetes.io/projected/efcc61c0-a81a-405d-b902-029423222c50-kube-api-access-jcmrn\") pod \"cert-manager-86cb77c54b-qhxds\" (UID: \"efcc61c0-a81a-405d-b902-029423222c50\") " pod="cert-manager/cert-manager-86cb77c54b-qhxds" Dec 08 00:21:07 crc kubenswrapper[4745]: I1208 00:21:07.395171 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcmrn\" (UniqueName: \"kubernetes.io/projected/efcc61c0-a81a-405d-b902-029423222c50-kube-api-access-jcmrn\") pod \"cert-manager-86cb77c54b-qhxds\" (UID: \"efcc61c0-a81a-405d-b902-029423222c50\") " pod="cert-manager/cert-manager-86cb77c54b-qhxds" Dec 08 00:21:07 crc kubenswrapper[4745]: I1208 00:21:07.397475 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/efcc61c0-a81a-405d-b902-029423222c50-bound-sa-token\") pod 
\"cert-manager-86cb77c54b-qhxds\" (UID: \"efcc61c0-a81a-405d-b902-029423222c50\") " pod="cert-manager/cert-manager-86cb77c54b-qhxds" Dec 08 00:21:07 crc kubenswrapper[4745]: I1208 00:21:07.576897 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-qhxds" Dec 08 00:21:08 crc kubenswrapper[4745]: I1208 00:21:08.379896 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-qhxds"] Dec 08 00:21:08 crc kubenswrapper[4745]: I1208 00:21:08.835377 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-qhxds" event={"ID":"efcc61c0-a81a-405d-b902-029423222c50","Type":"ContainerStarted","Data":"5d83970b575574454b8d523e641ea34d23826b36e584e658e2951d9fa69dd2ce"} Dec 08 00:21:08 crc kubenswrapper[4745]: I1208 00:21:08.835723 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-qhxds" event={"ID":"efcc61c0-a81a-405d-b902-029423222c50","Type":"ContainerStarted","Data":"7282ddd587adf36043ef7fd40c285f9d604b8a0bff3b5fa4b1788c0913a77d11"} Dec 08 00:21:08 crc kubenswrapper[4745]: I1208 00:21:08.837871 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-zn2dd" event={"ID":"589ab375-83a9-43be-a945-92245f00f756","Type":"ContainerStarted","Data":"9bf9e306af263698db9d136f0af662b3887ccd92b39208cae9685fa88a7bfbb2"} Dec 08 00:21:08 crc kubenswrapper[4745]: I1208 00:21:08.839843 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-p4kck" event={"ID":"c493caf0-3e42-4c34-8574-a46e6fa2db3f","Type":"ContainerStarted","Data":"289e8157da34be6378ae2a45c0aad168564adbd6c5ed47e53bbdc3ed0650f4c4"} Dec 08 00:21:08 crc kubenswrapper[4745]: I1208 00:21:08.840294 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-p4kck" Dec 08 00:21:08 crc kubenswrapper[4745]: I1208 00:21:08.874235 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-zn2dd" podStartSLOduration=1.966501233 podStartE2EDuration="13.874221265s" podCreationTimestamp="2025-12-08 00:20:55 +0000 UTC" firstStartedPulling="2025-12-08 00:20:56.111257951 +0000 UTC m=+811.540464251" lastFinishedPulling="2025-12-08 00:21:08.018977963 +0000 UTC m=+823.448184283" observedRunningTime="2025-12-08 00:21:08.873262618 +0000 UTC m=+824.302468918" watchObservedRunningTime="2025-12-08 00:21:08.874221265 +0000 UTC m=+824.303427555" Dec 08 00:21:08 crc kubenswrapper[4745]: I1208 00:21:08.874872 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-qhxds" podStartSLOduration=1.874867652 podStartE2EDuration="1.874867652s" podCreationTimestamp="2025-12-08 00:21:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:21:08.854081886 +0000 UTC m=+824.283288196" watchObservedRunningTime="2025-12-08 00:21:08.874867652 +0000 UTC m=+824.304073952" Dec 08 00:21:08 crc kubenswrapper[4745]: I1208 00:21:08.906615 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-p4kck" podStartSLOduration=1.7629896710000001 podStartE2EDuration="15.906598097s" podCreationTimestamp="2025-12-08 00:20:53 +0000 UTC" firstStartedPulling="2025-12-08 00:20:53.857092458 
+0000 UTC m=+809.286298758" lastFinishedPulling="2025-12-08 00:21:08.000700874 +0000 UTC m=+823.429907184" observedRunningTime="2025-12-08 00:21:08.904265513 +0000 UTC m=+824.333471813" watchObservedRunningTime="2025-12-08 00:21:08.906598097 +0000 UTC m=+824.335804397" Dec 08 00:21:09 crc kubenswrapper[4745]: I1208 00:21:09.185627 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/elasticsearch-es-default-0" Dec 08 00:21:13 crc kubenswrapper[4745]: I1208 00:21:13.506961 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-p4kck" Dec 08 00:22:39 crc kubenswrapper[4745]: I1208 00:22:39.616222 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-g64px"] Dec 08 00:22:39 crc kubenswrapper[4745]: I1208 00:22:39.617628 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:39 crc kubenswrapper[4745]: I1208 00:22:39.626867 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g64px"] Dec 08 00:22:39 crc kubenswrapper[4745]: I1208 00:22:39.696832 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ff7b355-2287-49e2-a465-4702c44bb2a4-catalog-content\") pod \"community-operators-g64px\" (UID: \"9ff7b355-2287-49e2-a465-4702c44bb2a4\") " pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:39 crc kubenswrapper[4745]: I1208 00:22:39.696917 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25mwh\" (UniqueName: \"kubernetes.io/projected/9ff7b355-2287-49e2-a465-4702c44bb2a4-kube-api-access-25mwh\") pod \"community-operators-g64px\" (UID: \"9ff7b355-2287-49e2-a465-4702c44bb2a4\") " pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:39 crc kubenswrapper[4745]: I1208 00:22:39.697015 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ff7b355-2287-49e2-a465-4702c44bb2a4-utilities\") pod \"community-operators-g64px\" (UID: \"9ff7b355-2287-49e2-a465-4702c44bb2a4\") " pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:39 crc kubenswrapper[4745]: I1208 00:22:39.798229 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ff7b355-2287-49e2-a465-4702c44bb2a4-utilities\") pod \"community-operators-g64px\" (UID: \"9ff7b355-2287-49e2-a465-4702c44bb2a4\") " pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:39 crc kubenswrapper[4745]: I1208 00:22:39.798393 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ff7b355-2287-49e2-a465-4702c44bb2a4-catalog-content\") pod \"community-operators-g64px\" (UID: \"9ff7b355-2287-49e2-a465-4702c44bb2a4\") " pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:39 crc kubenswrapper[4745]: I1208 00:22:39.798463 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25mwh\" (UniqueName: \"kubernetes.io/projected/9ff7b355-2287-49e2-a465-4702c44bb2a4-kube-api-access-25mwh\") pod \"community-operators-g64px\" (UID: 
\"9ff7b355-2287-49e2-a465-4702c44bb2a4\") " pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:39 crc kubenswrapper[4745]: I1208 00:22:39.798758 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ff7b355-2287-49e2-a465-4702c44bb2a4-utilities\") pod \"community-operators-g64px\" (UID: \"9ff7b355-2287-49e2-a465-4702c44bb2a4\") " pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:39 crc kubenswrapper[4745]: I1208 00:22:39.798766 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ff7b355-2287-49e2-a465-4702c44bb2a4-catalog-content\") pod \"community-operators-g64px\" (UID: \"9ff7b355-2287-49e2-a465-4702c44bb2a4\") " pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:39 crc kubenswrapper[4745]: I1208 00:22:39.825961 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25mwh\" (UniqueName: \"kubernetes.io/projected/9ff7b355-2287-49e2-a465-4702c44bb2a4-kube-api-access-25mwh\") pod \"community-operators-g64px\" (UID: \"9ff7b355-2287-49e2-a465-4702c44bb2a4\") " pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:39 crc kubenswrapper[4745]: I1208 00:22:39.933465 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:40 crc kubenswrapper[4745]: I1208 00:22:40.212375 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g64px"] Dec 08 00:22:40 crc kubenswrapper[4745]: I1208 00:22:40.496024 4745 generic.go:334] "Generic (PLEG): container finished" podID="9ff7b355-2287-49e2-a465-4702c44bb2a4" containerID="de2aa516d6b4deef941341e723c4d767c168b4d84d05a327af7c0450a9476603" exitCode=0 Dec 08 00:22:40 crc kubenswrapper[4745]: I1208 00:22:40.496064 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g64px" event={"ID":"9ff7b355-2287-49e2-a465-4702c44bb2a4","Type":"ContainerDied","Data":"de2aa516d6b4deef941341e723c4d767c168b4d84d05a327af7c0450a9476603"} Dec 08 00:22:40 crc kubenswrapper[4745]: I1208 00:22:40.496089 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g64px" event={"ID":"9ff7b355-2287-49e2-a465-4702c44bb2a4","Type":"ContainerStarted","Data":"a4929efcacc85c0a76813bcfab0adf2adec961c82b1a8f1f0d8703b852ba618f"} Dec 08 00:22:41 crc kubenswrapper[4745]: I1208 00:22:41.506576 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g64px" event={"ID":"9ff7b355-2287-49e2-a465-4702c44bb2a4","Type":"ContainerStarted","Data":"6da98d3ecd4ff5a10553959a6c94a1fb8fe8878881ff692ec7eadecbea7dcc76"} Dec 08 00:22:42 crc kubenswrapper[4745]: I1208 00:22:42.515183 4745 generic.go:334] "Generic (PLEG): container finished" podID="9ff7b355-2287-49e2-a465-4702c44bb2a4" containerID="6da98d3ecd4ff5a10553959a6c94a1fb8fe8878881ff692ec7eadecbea7dcc76" exitCode=0 Dec 08 00:22:42 crc kubenswrapper[4745]: I1208 00:22:42.515230 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g64px" event={"ID":"9ff7b355-2287-49e2-a465-4702c44bb2a4","Type":"ContainerDied","Data":"6da98d3ecd4ff5a10553959a6c94a1fb8fe8878881ff692ec7eadecbea7dcc76"} Dec 08 00:22:43 crc kubenswrapper[4745]: I1208 00:22:43.524220 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-g64px" event={"ID":"9ff7b355-2287-49e2-a465-4702c44bb2a4","Type":"ContainerStarted","Data":"9f11ebe9174605af80d8929227ca6cca71a7af5f1457069bc3a8ce0ca642f4a3"} Dec 08 00:22:43 crc kubenswrapper[4745]: I1208 00:22:43.545175 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-g64px" podStartSLOduration=2.722870454 podStartE2EDuration="4.545161037s" podCreationTimestamp="2025-12-08 00:22:39 +0000 UTC" firstStartedPulling="2025-12-08 00:22:40.49775281 +0000 UTC m=+915.926959120" lastFinishedPulling="2025-12-08 00:22:42.320043393 +0000 UTC m=+917.749249703" observedRunningTime="2025-12-08 00:22:43.543455391 +0000 UTC m=+918.972661701" watchObservedRunningTime="2025-12-08 00:22:43.545161037 +0000 UTC m=+918.974367337" Dec 08 00:22:49 crc kubenswrapper[4745]: I1208 00:22:49.933741 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:49 crc kubenswrapper[4745]: I1208 00:22:49.936083 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:49 crc kubenswrapper[4745]: I1208 00:22:49.991332 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:50 crc kubenswrapper[4745]: I1208 00:22:50.613881 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:50 crc kubenswrapper[4745]: I1208 00:22:50.660086 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g64px"] Dec 08 00:22:52 crc kubenswrapper[4745]: I1208 00:22:52.460291 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:22:52 crc kubenswrapper[4745]: I1208 00:22:52.460373 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:22:52 crc kubenswrapper[4745]: I1208 00:22:52.591874 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-g64px" podUID="9ff7b355-2287-49e2-a465-4702c44bb2a4" containerName="registry-server" containerID="cri-o://9f11ebe9174605af80d8929227ca6cca71a7af5f1457069bc3a8ce0ca642f4a3" gracePeriod=2 Dec 08 00:22:53 crc kubenswrapper[4745]: I1208 00:22:53.604178 4745 generic.go:334] "Generic (PLEG): container finished" podID="9ff7b355-2287-49e2-a465-4702c44bb2a4" containerID="9f11ebe9174605af80d8929227ca6cca71a7af5f1457069bc3a8ce0ca642f4a3" exitCode=0 Dec 08 00:22:53 crc kubenswrapper[4745]: I1208 00:22:53.604244 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g64px" event={"ID":"9ff7b355-2287-49e2-a465-4702c44bb2a4","Type":"ContainerDied","Data":"9f11ebe9174605af80d8929227ca6cca71a7af5f1457069bc3a8ce0ca642f4a3"} Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.081358 4745 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.216806 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25mwh\" (UniqueName: \"kubernetes.io/projected/9ff7b355-2287-49e2-a465-4702c44bb2a4-kube-api-access-25mwh\") pod \"9ff7b355-2287-49e2-a465-4702c44bb2a4\" (UID: \"9ff7b355-2287-49e2-a465-4702c44bb2a4\") " Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.217036 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ff7b355-2287-49e2-a465-4702c44bb2a4-catalog-content\") pod \"9ff7b355-2287-49e2-a465-4702c44bb2a4\" (UID: \"9ff7b355-2287-49e2-a465-4702c44bb2a4\") " Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.217116 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ff7b355-2287-49e2-a465-4702c44bb2a4-utilities\") pod \"9ff7b355-2287-49e2-a465-4702c44bb2a4\" (UID: \"9ff7b355-2287-49e2-a465-4702c44bb2a4\") " Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.218851 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ff7b355-2287-49e2-a465-4702c44bb2a4-utilities" (OuterVolumeSpecName: "utilities") pod "9ff7b355-2287-49e2-a465-4702c44bb2a4" (UID: "9ff7b355-2287-49e2-a465-4702c44bb2a4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.221710 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ff7b355-2287-49e2-a465-4702c44bb2a4-kube-api-access-25mwh" (OuterVolumeSpecName: "kube-api-access-25mwh") pod "9ff7b355-2287-49e2-a465-4702c44bb2a4" (UID: "9ff7b355-2287-49e2-a465-4702c44bb2a4"). InnerVolumeSpecName "kube-api-access-25mwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.273489 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ff7b355-2287-49e2-a465-4702c44bb2a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9ff7b355-2287-49e2-a465-4702c44bb2a4" (UID: "9ff7b355-2287-49e2-a465-4702c44bb2a4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.319546 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ff7b355-2287-49e2-a465-4702c44bb2a4-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.319597 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25mwh\" (UniqueName: \"kubernetes.io/projected/9ff7b355-2287-49e2-a465-4702c44bb2a4-kube-api-access-25mwh\") on node \"crc\" DevicePath \"\"" Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.319610 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ff7b355-2287-49e2-a465-4702c44bb2a4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.613514 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g64px" event={"ID":"9ff7b355-2287-49e2-a465-4702c44bb2a4","Type":"ContainerDied","Data":"a4929efcacc85c0a76813bcfab0adf2adec961c82b1a8f1f0d8703b852ba618f"} Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.614381 4745 scope.go:117] "RemoveContainer" containerID="9f11ebe9174605af80d8929227ca6cca71a7af5f1457069bc3a8ce0ca642f4a3" Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.613580 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g64px" Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.642026 4745 scope.go:117] "RemoveContainer" containerID="6da98d3ecd4ff5a10553959a6c94a1fb8fe8878881ff692ec7eadecbea7dcc76" Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.655279 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g64px"] Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.664406 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-g64px"] Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.683171 4745 scope.go:117] "RemoveContainer" containerID="de2aa516d6b4deef941341e723c4d767c168b4d84d05a327af7c0450a9476603" Dec 08 00:22:54 crc kubenswrapper[4745]: I1208 00:22:54.892264 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ff7b355-2287-49e2-a465-4702c44bb2a4" path="/var/lib/kubelet/pods/9ff7b355-2287-49e2-a465-4702c44bb2a4/volumes" Dec 08 00:22:58 crc kubenswrapper[4745]: I1208 00:22:58.642410 4745 generic.go:334] "Generic (PLEG): container finished" podID="f978daae-3f37-4b63-8fd8-f3bad5802243" containerID="1c3af78c3fed3a8434dd5364b526b9ee43d98d7cff6783a5072969c50c4cec66" exitCode=0 Dec 08 00:22:58 crc kubenswrapper[4745]: I1208 00:22:58.642486 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"f978daae-3f37-4b63-8fd8-f3bad5802243","Type":"ContainerDied","Data":"1c3af78c3fed3a8434dd5364b526b9ee43d98d7cff6783a5072969c50c4cec66"} Dec 08 00:22:59 crc kubenswrapper[4745]: I1208 00:22:59.920140 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.087560 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/f978daae-3f37-4b63-8fd8-f3bad5802243-buildcachedir\") pod \"f978daae-3f37-4b63-8fd8-f3bad5802243\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.087909 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-container-storage-root\") pod \"f978daae-3f37-4b63-8fd8-f3bad5802243\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.087838 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f978daae-3f37-4b63-8fd8-f3bad5802243-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "f978daae-3f37-4b63-8fd8-f3bad5802243" (UID: "f978daae-3f37-4b63-8fd8-f3bad5802243"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.087997 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-system-configs\") pod \"f978daae-3f37-4b63-8fd8-f3bad5802243\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.088608 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "f978daae-3f37-4b63-8fd8-f3bad5802243" (UID: "f978daae-3f37-4b63-8fd8-f3bad5802243"). InnerVolumeSpecName "build-system-configs". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.095076 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/f978daae-3f37-4b63-8fd8-f3bad5802243-builder-dockercfg-vzj4m-push\") pod \"f978daae-3f37-4b63-8fd8-f3bad5802243\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.095143 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-buildworkdir\") pod \"f978daae-3f37-4b63-8fd8-f3bad5802243\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.095169 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-proxy-ca-bundles\") pod \"f978daae-3f37-4b63-8fd8-f3bad5802243\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.095213 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-container-storage-run\") pod \"f978daae-3f37-4b63-8fd8-f3bad5802243\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.095244 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/f978daae-3f37-4b63-8fd8-f3bad5802243-builder-dockercfg-vzj4m-pull\") pod \"f978daae-3f37-4b63-8fd8-f3bad5802243\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.095289 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-build-blob-cache\") pod \"f978daae-3f37-4b63-8fd8-f3bad5802243\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.095310 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwtp8\" (UniqueName: \"kubernetes.io/projected/f978daae-3f37-4b63-8fd8-f3bad5802243-kube-api-access-jwtp8\") pod \"f978daae-3f37-4b63-8fd8-f3bad5802243\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.095367 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f978daae-3f37-4b63-8fd8-f3bad5802243-node-pullsecrets\") pod \"f978daae-3f37-4b63-8fd8-f3bad5802243\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.095391 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-ca-bundles\") pod \"f978daae-3f37-4b63-8fd8-f3bad5802243\" (UID: \"f978daae-3f37-4b63-8fd8-f3bad5802243\") " Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.095797 4745 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: 
\"kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-system-configs\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.095820 4745 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/f978daae-3f37-4b63-8fd8-f3bad5802243-buildcachedir\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.095829 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f978daae-3f37-4b63-8fd8-f3bad5802243-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "f978daae-3f37-4b63-8fd8-f3bad5802243" (UID: "f978daae-3f37-4b63-8fd8-f3bad5802243"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.096573 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "f978daae-3f37-4b63-8fd8-f3bad5802243" (UID: "f978daae-3f37-4b63-8fd8-f3bad5802243"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.096636 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "f978daae-3f37-4b63-8fd8-f3bad5802243" (UID: "f978daae-3f37-4b63-8fd8-f3bad5802243"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.096779 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "f978daae-3f37-4b63-8fd8-f3bad5802243" (UID: "f978daae-3f37-4b63-8fd8-f3bad5802243"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.136839 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "f978daae-3f37-4b63-8fd8-f3bad5802243" (UID: "f978daae-3f37-4b63-8fd8-f3bad5802243"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.155812 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f978daae-3f37-4b63-8fd8-f3bad5802243-builder-dockercfg-vzj4m-pull" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-pull") pod "f978daae-3f37-4b63-8fd8-f3bad5802243" (UID: "f978daae-3f37-4b63-8fd8-f3bad5802243"). InnerVolumeSpecName "builder-dockercfg-vzj4m-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.156696 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f978daae-3f37-4b63-8fd8-f3bad5802243-builder-dockercfg-vzj4m-push" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-push") pod "f978daae-3f37-4b63-8fd8-f3bad5802243" (UID: "f978daae-3f37-4b63-8fd8-f3bad5802243"). InnerVolumeSpecName "builder-dockercfg-vzj4m-push". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.157833 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f978daae-3f37-4b63-8fd8-f3bad5802243-kube-api-access-jwtp8" (OuterVolumeSpecName: "kube-api-access-jwtp8") pod "f978daae-3f37-4b63-8fd8-f3bad5802243" (UID: "f978daae-3f37-4b63-8fd8-f3bad5802243"). InnerVolumeSpecName "kube-api-access-jwtp8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.197513 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/f978daae-3f37-4b63-8fd8-f3bad5802243-builder-dockercfg-vzj4m-push\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.197630 4745 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-buildworkdir\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.197674 4745 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.197685 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-container-storage-run\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.197694 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/f978daae-3f37-4b63-8fd8-f3bad5802243-builder-dockercfg-vzj4m-pull\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.197704 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwtp8\" (UniqueName: \"kubernetes.io/projected/f978daae-3f37-4b63-8fd8-f3bad5802243-kube-api-access-jwtp8\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.197713 4745 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f978daae-3f37-4b63-8fd8-f3bad5802243-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.197721 4745 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f978daae-3f37-4b63-8fd8-f3bad5802243-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.285419 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "f978daae-3f37-4b63-8fd8-f3bad5802243" (UID: "f978daae-3f37-4b63-8fd8-f3bad5802243"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.302332 4745 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-build-blob-cache\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.659267 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"f978daae-3f37-4b63-8fd8-f3bad5802243","Type":"ContainerDied","Data":"0d9782421d7bdec7cba8a57257de147ae6d2fbfab6f43134e340870b6b54edab"} Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.659314 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d9782421d7bdec7cba8a57257de147ae6d2fbfab6f43134e340870b6b54edab" Dec 08 00:23:00 crc kubenswrapper[4745]: I1208 00:23:00.659392 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Dec 08 00:23:02 crc kubenswrapper[4745]: I1208 00:23:02.533073 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "f978daae-3f37-4b63-8fd8-f3bad5802243" (UID: "f978daae-3f37-4b63-8fd8-f3bad5802243"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:23:02 crc kubenswrapper[4745]: I1208 00:23:02.533473 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/f978daae-3f37-4b63-8fd8-f3bad5802243-container-storage-root\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.780688 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Dec 08 00:23:04 crc kubenswrapper[4745]: E1208 00:23:04.781301 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f978daae-3f37-4b63-8fd8-f3bad5802243" containerName="git-clone" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.781315 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f978daae-3f37-4b63-8fd8-f3bad5802243" containerName="git-clone" Dec 08 00:23:04 crc kubenswrapper[4745]: E1208 00:23:04.781328 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f978daae-3f37-4b63-8fd8-f3bad5802243" containerName="manage-dockerfile" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.781336 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f978daae-3f37-4b63-8fd8-f3bad5802243" containerName="manage-dockerfile" Dec 08 00:23:04 crc kubenswrapper[4745]: E1208 00:23:04.781348 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ff7b355-2287-49e2-a465-4702c44bb2a4" containerName="extract-utilities" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.781355 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ff7b355-2287-49e2-a465-4702c44bb2a4" containerName="extract-utilities" Dec 08 00:23:04 crc kubenswrapper[4745]: E1208 00:23:04.781363 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ff7b355-2287-49e2-a465-4702c44bb2a4" containerName="registry-server" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.781370 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ff7b355-2287-49e2-a465-4702c44bb2a4" 
containerName="registry-server" Dec 08 00:23:04 crc kubenswrapper[4745]: E1208 00:23:04.781383 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f978daae-3f37-4b63-8fd8-f3bad5802243" containerName="docker-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.781389 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f978daae-3f37-4b63-8fd8-f3bad5802243" containerName="docker-build" Dec 08 00:23:04 crc kubenswrapper[4745]: E1208 00:23:04.781401 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ff7b355-2287-49e2-a465-4702c44bb2a4" containerName="extract-content" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.781407 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ff7b355-2287-49e2-a465-4702c44bb2a4" containerName="extract-content" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.781739 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="f978daae-3f37-4b63-8fd8-f3bad5802243" containerName="docker-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.781752 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ff7b355-2287-49e2-a465-4702c44bb2a4" containerName="registry-server" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.782582 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.784938 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-1-ca" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.785070 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-vzj4m" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.785164 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-1-global-ca" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.787073 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-1-sys-config" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.798970 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.868667 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-proxy-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.868749 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-container-storage-run\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.868790 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-blob-cache\") pod \"smart-gateway-operator-1-build\" (UID: 
\"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.868832 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/4ecee30b-4d20-4604-a896-b79d5f18ba2c-builder-dockercfg-vzj4m-pull\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.868875 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/4ecee30b-4d20-4604-a896-b79d5f18ba2c-buildcachedir\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.868906 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-system-configs\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.869059 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-buildworkdir\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.869165 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-container-storage-root\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.869251 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7bnw\" (UniqueName: \"kubernetes.io/projected/4ecee30b-4d20-4604-a896-b79d5f18ba2c-kube-api-access-v7bnw\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.869290 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.869363 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4ecee30b-4d20-4604-a896-b79d5f18ba2c-node-pullsecrets\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc 
kubenswrapper[4745]: I1208 00:23:04.869423 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/4ecee30b-4d20-4604-a896-b79d5f18ba2c-builder-dockercfg-vzj4m-push\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.974703 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/4ecee30b-4d20-4604-a896-b79d5f18ba2c-builder-dockercfg-vzj4m-push\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.974813 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-proxy-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.974868 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-container-storage-run\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.974909 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-blob-cache\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.974979 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/4ecee30b-4d20-4604-a896-b79d5f18ba2c-builder-dockercfg-vzj4m-pull\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.975017 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/4ecee30b-4d20-4604-a896-b79d5f18ba2c-buildcachedir\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.975052 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-system-configs\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.975092 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: 
\"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-buildworkdir\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.975137 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-container-storage-root\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.975248 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7bnw\" (UniqueName: \"kubernetes.io/projected/4ecee30b-4d20-4604-a896-b79d5f18ba2c-kube-api-access-v7bnw\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.975257 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/4ecee30b-4d20-4604-a896-b79d5f18ba2c-buildcachedir\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.975281 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.975365 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-container-storage-run\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.975426 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4ecee30b-4d20-4604-a896-b79d5f18ba2c-node-pullsecrets\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.975454 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-blob-cache\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.975582 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-buildworkdir\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.975584 4745 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-container-storage-root\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.975660 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4ecee30b-4d20-4604-a896-b79d5f18ba2c-node-pullsecrets\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.976000 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-system-configs\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.976531 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-proxy-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.976896 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.987717 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/4ecee30b-4d20-4604-a896-b79d5f18ba2c-builder-dockercfg-vzj4m-pull\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.991331 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/4ecee30b-4d20-4604-a896-b79d5f18ba2c-builder-dockercfg-vzj4m-push\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:04 crc kubenswrapper[4745]: I1208 00:23:04.997094 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7bnw\" (UniqueName: \"kubernetes.io/projected/4ecee30b-4d20-4604-a896-b79d5f18ba2c-kube-api-access-v7bnw\") pod \"smart-gateway-operator-1-build\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:05 crc kubenswrapper[4745]: I1208 00:23:05.097901 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:05 crc kubenswrapper[4745]: I1208 00:23:05.549687 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Dec 08 00:23:05 crc kubenswrapper[4745]: I1208 00:23:05.693164 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"4ecee30b-4d20-4604-a896-b79d5f18ba2c","Type":"ContainerStarted","Data":"5f228ddc8c15156d9e7add9ad1a15b783c288fb86fd385ef2ca6f4067a57e4b5"} Dec 08 00:23:06 crc kubenswrapper[4745]: I1208 00:23:06.711014 4745 generic.go:334] "Generic (PLEG): container finished" podID="4ecee30b-4d20-4604-a896-b79d5f18ba2c" containerID="e9b12dc089e9f97b941bb3e58d2032f4ef39a0d8e44d8e631a3689c35afe9d7e" exitCode=0 Dec 08 00:23:06 crc kubenswrapper[4745]: I1208 00:23:06.711337 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"4ecee30b-4d20-4604-a896-b79d5f18ba2c","Type":"ContainerDied","Data":"e9b12dc089e9f97b941bb3e58d2032f4ef39a0d8e44d8e631a3689c35afe9d7e"} Dec 08 00:23:07 crc kubenswrapper[4745]: I1208 00:23:07.721343 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"4ecee30b-4d20-4604-a896-b79d5f18ba2c","Type":"ContainerStarted","Data":"05663802dbdb5116da2bf005396f1e79d83b8366c560ee10631560d5c81d458b"} Dec 08 00:23:07 crc kubenswrapper[4745]: I1208 00:23:07.753264 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-1-build" podStartSLOduration=3.753237884 podStartE2EDuration="3.753237884s" podCreationTimestamp="2025-12-08 00:23:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:23:07.747733077 +0000 UTC m=+943.176939437" watchObservedRunningTime="2025-12-08 00:23:07.753237884 +0000 UTC m=+943.182444194" Dec 08 00:23:15 crc kubenswrapper[4745]: I1208 00:23:15.702284 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Dec 08 00:23:15 crc kubenswrapper[4745]: I1208 00:23:15.703299 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/smart-gateway-operator-1-build" podUID="4ecee30b-4d20-4604-a896-b79d5f18ba2c" containerName="docker-build" containerID="cri-o://05663802dbdb5116da2bf005396f1e79d83b8366c560ee10631560d5c81d458b" gracePeriod=30 Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.354911 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-2-build"] Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.357682 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.361762 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-2-global-ca" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.362343 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-2-ca" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.363033 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-2-sys-config" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.405534 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-2-build"] Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.452866 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-container-storage-run\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.452953 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-proxy-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.452972 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/5e2df92e-e75d-4de2-b225-43d15f8b64d6-buildcachedir\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.453003 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5e2df92e-e75d-4de2-b225-43d15f8b64d6-node-pullsecrets\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.453067 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/5e2df92e-e75d-4de2-b225-43d15f8b64d6-builder-dockercfg-vzj4m-pull\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.453086 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-blob-cache\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.453106 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-ttjrx\" (UniqueName: \"kubernetes.io/projected/5e2df92e-e75d-4de2-b225-43d15f8b64d6-kube-api-access-ttjrx\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.453137 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/5e2df92e-e75d-4de2-b225-43d15f8b64d6-builder-dockercfg-vzj4m-push\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.453158 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-system-configs\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.453269 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.453327 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-buildworkdir\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.453394 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-container-storage-root\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.554088 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttjrx\" (UniqueName: \"kubernetes.io/projected/5e2df92e-e75d-4de2-b225-43d15f8b64d6-kube-api-access-ttjrx\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.554514 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/5e2df92e-e75d-4de2-b225-43d15f8b64d6-builder-dockercfg-vzj4m-push\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.554647 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-system-configs\") pod 
\"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.554760 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.554875 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-buildworkdir\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.555018 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-container-storage-root\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.555125 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-container-storage-run\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.555243 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-proxy-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.555349 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/5e2df92e-e75d-4de2-b225-43d15f8b64d6-buildcachedir\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.555491 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5e2df92e-e75d-4de2-b225-43d15f8b64d6-node-pullsecrets\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.555628 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/5e2df92e-e75d-4de2-b225-43d15f8b64d6-builder-dockercfg-vzj4m-pull\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.555733 4745 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-blob-cache\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.555745 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-container-storage-run\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.555983 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-buildworkdir\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.556053 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/5e2df92e-e75d-4de2-b225-43d15f8b64d6-buildcachedir\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.556074 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5e2df92e-e75d-4de2-b225-43d15f8b64d6-node-pullsecrets\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.556326 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-container-storage-root\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.556436 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-proxy-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.556490 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-system-configs\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.557055 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.557564 4745 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-blob-cache\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.560617 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/5e2df92e-e75d-4de2-b225-43d15f8b64d6-builder-dockercfg-vzj4m-pull\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.563494 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/5e2df92e-e75d-4de2-b225-43d15f8b64d6-builder-dockercfg-vzj4m-push\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.570292 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttjrx\" (UniqueName: \"kubernetes.io/projected/5e2df92e-e75d-4de2-b225-43d15f8b64d6-kube-api-access-ttjrx\") pod \"smart-gateway-operator-2-build\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:17 crc kubenswrapper[4745]: I1208 00:23:17.679204 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:23:18 crc kubenswrapper[4745]: I1208 00:23:18.124823 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-2-build"] Dec 08 00:23:18 crc kubenswrapper[4745]: I1208 00:23:18.791399 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"5e2df92e-e75d-4de2-b225-43d15f8b64d6","Type":"ContainerStarted","Data":"6cc58bb956ce6521bc9cc55d373ca087a7bb0c9932b8c488137cb54a11cbc2d8"} Dec 08 00:23:18 crc kubenswrapper[4745]: I1208 00:23:18.791952 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"5e2df92e-e75d-4de2-b225-43d15f8b64d6","Type":"ContainerStarted","Data":"27b1778fe82900ad6f42bddbd0399d2d9ce5c657b716d0f31308e8abd7146232"} Dec 08 00:23:18 crc kubenswrapper[4745]: I1208 00:23:18.795456 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-1-build_4ecee30b-4d20-4604-a896-b79d5f18ba2c/docker-build/0.log" Dec 08 00:23:18 crc kubenswrapper[4745]: I1208 00:23:18.797337 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"4ecee30b-4d20-4604-a896-b79d5f18ba2c","Type":"ContainerDied","Data":"05663802dbdb5116da2bf005396f1e79d83b8366c560ee10631560d5c81d458b"} Dec 08 00:23:18 crc kubenswrapper[4745]: I1208 00:23:18.797316 4745 generic.go:334] "Generic (PLEG): container finished" podID="4ecee30b-4d20-4604-a896-b79d5f18ba2c" containerID="05663802dbdb5116da2bf005396f1e79d83b8366c560ee10631560d5c81d458b" exitCode=1 Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.051662 4745 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_smart-gateway-operator-1-build_4ecee30b-4d20-4604-a896-b79d5f18ba2c/docker-build/0.log" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.052766 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.075423 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/4ecee30b-4d20-4604-a896-b79d5f18ba2c-builder-dockercfg-vzj4m-push\") pod \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.075465 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-container-storage-run\") pod \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.075489 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/4ecee30b-4d20-4604-a896-b79d5f18ba2c-buildcachedir\") pod \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.075514 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4ecee30b-4d20-4604-a896-b79d5f18ba2c-node-pullsecrets\") pod \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.075570 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7bnw\" (UniqueName: \"kubernetes.io/projected/4ecee30b-4d20-4604-a896-b79d5f18ba2c-kube-api-access-v7bnw\") pod \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.075607 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-proxy-ca-bundles\") pod \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.075635 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-ca-bundles\") pod \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.075653 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4ecee30b-4d20-4604-a896-b79d5f18ba2c-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "4ecee30b-4d20-4604-a896-b79d5f18ba2c" (UID: "4ecee30b-4d20-4604-a896-b79d5f18ba2c"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.075676 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-buildworkdir\") pod \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.075668 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4ecee30b-4d20-4604-a896-b79d5f18ba2c-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "4ecee30b-4d20-4604-a896-b79d5f18ba2c" (UID: "4ecee30b-4d20-4604-a896-b79d5f18ba2c"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.075706 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-system-configs\") pod \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.075740 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/4ecee30b-4d20-4604-a896-b79d5f18ba2c-builder-dockercfg-vzj4m-pull\") pod \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.075811 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-container-storage-root\") pod \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.076448 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "4ecee30b-4d20-4604-a896-b79d5f18ba2c" (UID: "4ecee30b-4d20-4604-a896-b79d5f18ba2c"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.076775 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "4ecee30b-4d20-4604-a896-b79d5f18ba2c" (UID: "4ecee30b-4d20-4604-a896-b79d5f18ba2c"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.076818 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "4ecee30b-4d20-4604-a896-b79d5f18ba2c" (UID: "4ecee30b-4d20-4604-a896-b79d5f18ba2c"). InnerVolumeSpecName "container-storage-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.076964 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "4ecee30b-4d20-4604-a896-b79d5f18ba2c" (UID: "4ecee30b-4d20-4604-a896-b79d5f18ba2c"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.077069 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-blob-cache\") pod \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\" (UID: \"4ecee30b-4d20-4604-a896-b79d5f18ba2c\") " Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.077481 4745 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.077501 4745 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-buildworkdir\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.077513 4745 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-system-configs\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.077528 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-container-storage-run\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.077538 4745 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/4ecee30b-4d20-4604-a896-b79d5f18ba2c-buildcachedir\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.077547 4745 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4ecee30b-4d20-4604-a896-b79d5f18ba2c-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.077730 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "4ecee30b-4d20-4604-a896-b79d5f18ba2c" (UID: "4ecee30b-4d20-4604-a896-b79d5f18ba2c"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.085738 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ecee30b-4d20-4604-a896-b79d5f18ba2c-kube-api-access-v7bnw" (OuterVolumeSpecName: "kube-api-access-v7bnw") pod "4ecee30b-4d20-4604-a896-b79d5f18ba2c" (UID: "4ecee30b-4d20-4604-a896-b79d5f18ba2c"). InnerVolumeSpecName "kube-api-access-v7bnw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.086032 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ecee30b-4d20-4604-a896-b79d5f18ba2c-builder-dockercfg-vzj4m-pull" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-pull") pod "4ecee30b-4d20-4604-a896-b79d5f18ba2c" (UID: "4ecee30b-4d20-4604-a896-b79d5f18ba2c"). InnerVolumeSpecName "builder-dockercfg-vzj4m-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.088132 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ecee30b-4d20-4604-a896-b79d5f18ba2c-builder-dockercfg-vzj4m-push" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-push") pod "4ecee30b-4d20-4604-a896-b79d5f18ba2c" (UID: "4ecee30b-4d20-4604-a896-b79d5f18ba2c"). InnerVolumeSpecName "builder-dockercfg-vzj4m-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.178318 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/4ecee30b-4d20-4604-a896-b79d5f18ba2c-builder-dockercfg-vzj4m-push\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.178619 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7bnw\" (UniqueName: \"kubernetes.io/projected/4ecee30b-4d20-4604-a896-b79d5f18ba2c-kube-api-access-v7bnw\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.178715 4745 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.178832 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/4ecee30b-4d20-4604-a896-b79d5f18ba2c-builder-dockercfg-vzj4m-pull\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.506921 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "4ecee30b-4d20-4604-a896-b79d5f18ba2c" (UID: "4ecee30b-4d20-4604-a896-b79d5f18ba2c"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.585584 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-container-storage-root\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.822597 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-1-build_4ecee30b-4d20-4604-a896-b79d5f18ba2c/docker-build/0.log" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.824155 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"4ecee30b-4d20-4604-a896-b79d5f18ba2c","Type":"ContainerDied","Data":"5f228ddc8c15156d9e7add9ad1a15b783c288fb86fd385ef2ca6f4067a57e4b5"} Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.824212 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-1-build" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.824241 4745 scope.go:117] "RemoveContainer" containerID="05663802dbdb5116da2bf005396f1e79d83b8366c560ee10631560d5c81d458b" Dec 08 00:23:19 crc kubenswrapper[4745]: I1208 00:23:19.903838 4745 scope.go:117] "RemoveContainer" containerID="e9b12dc089e9f97b941bb3e58d2032f4ef39a0d8e44d8e631a3689c35afe9d7e" Dec 08 00:23:20 crc kubenswrapper[4745]: I1208 00:23:20.493769 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "4ecee30b-4d20-4604-a896-b79d5f18ba2c" (UID: "4ecee30b-4d20-4604-a896-b79d5f18ba2c"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:23:20 crc kubenswrapper[4745]: I1208 00:23:20.500214 4745 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/4ecee30b-4d20-4604-a896-b79d5f18ba2c-build-blob-cache\") on node \"crc\" DevicePath \"\"" Dec 08 00:23:20 crc kubenswrapper[4745]: I1208 00:23:20.766517 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Dec 08 00:23:20 crc kubenswrapper[4745]: I1208 00:23:20.773897 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Dec 08 00:23:20 crc kubenswrapper[4745]: I1208 00:23:20.830262 4745 generic.go:334] "Generic (PLEG): container finished" podID="5e2df92e-e75d-4de2-b225-43d15f8b64d6" containerID="6cc58bb956ce6521bc9cc55d373ca087a7bb0c9932b8c488137cb54a11cbc2d8" exitCode=0 Dec 08 00:23:20 crc kubenswrapper[4745]: I1208 00:23:20.830344 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"5e2df92e-e75d-4de2-b225-43d15f8b64d6","Type":"ContainerDied","Data":"6cc58bb956ce6521bc9cc55d373ca087a7bb0c9932b8c488137cb54a11cbc2d8"} Dec 08 00:23:20 crc kubenswrapper[4745]: I1208 00:23:20.889412 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ecee30b-4d20-4604-a896-b79d5f18ba2c" path="/var/lib/kubelet/pods/4ecee30b-4d20-4604-a896-b79d5f18ba2c/volumes" Dec 08 00:23:21 crc kubenswrapper[4745]: I1208 00:23:21.840061 4745 generic.go:334] "Generic (PLEG): container finished" podID="5e2df92e-e75d-4de2-b225-43d15f8b64d6" containerID="efdfcd6165507924f8a92d2f2bf95939a24761c695bcb377a80313795d2abf3b" exitCode=0 Dec 08 00:23:21 crc kubenswrapper[4745]: I1208 00:23:21.840107 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"5e2df92e-e75d-4de2-b225-43d15f8b64d6","Type":"ContainerDied","Data":"efdfcd6165507924f8a92d2f2bf95939a24761c695bcb377a80313795d2abf3b"} Dec 08 00:23:21 crc kubenswrapper[4745]: I1208 00:23:21.875876 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-2-build_5e2df92e-e75d-4de2-b225-43d15f8b64d6/manage-dockerfile/0.log" Dec 08 00:23:22 crc kubenswrapper[4745]: I1208 00:23:22.460493 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:23:22 crc kubenswrapper[4745]: I1208 00:23:22.460565 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:23:22 crc kubenswrapper[4745]: I1208 00:23:22.849282 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"5e2df92e-e75d-4de2-b225-43d15f8b64d6","Type":"ContainerStarted","Data":"0f50d425c2b91d49d427e148e6e6e8da6f0f9e7975fd54916889fd7ab59203ce"} Dec 08 00:23:22 crc kubenswrapper[4745]: I1208 00:23:22.875720 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="service-telemetry/smart-gateway-operator-2-build" podStartSLOduration=5.875700017 podStartE2EDuration="5.875700017s" podCreationTimestamp="2025-12-08 00:23:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:23:22.873610391 +0000 UTC m=+958.302816701" watchObservedRunningTime="2025-12-08 00:23:22.875700017 +0000 UTC m=+958.304906337" Dec 08 00:23:52 crc kubenswrapper[4745]: I1208 00:23:52.460387 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:23:52 crc kubenswrapper[4745]: I1208 00:23:52.461100 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:23:52 crc kubenswrapper[4745]: I1208 00:23:52.461159 4745 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:23:52 crc kubenswrapper[4745]: I1208 00:23:52.462025 4745 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a0b54804879b59a5315813b6e61fe2985b6017fa236833a33d571f68aadbd8c5"} pod="openshift-machine-config-operator/machine-config-daemon-6czdv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 00:23:52 crc kubenswrapper[4745]: I1208 00:23:52.462100 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" containerID="cri-o://a0b54804879b59a5315813b6e61fe2985b6017fa236833a33d571f68aadbd8c5" gracePeriod=600 Dec 08 00:23:54 crc kubenswrapper[4745]: I1208 00:23:54.033804 4745 generic.go:334] "Generic (PLEG): container finished" podID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerID="a0b54804879b59a5315813b6e61fe2985b6017fa236833a33d571f68aadbd8c5" exitCode=0 Dec 08 00:23:54 crc kubenswrapper[4745]: I1208 00:23:54.033885 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerDied","Data":"a0b54804879b59a5315813b6e61fe2985b6017fa236833a33d571f68aadbd8c5"} Dec 08 00:23:54 crc kubenswrapper[4745]: I1208 00:23:54.034234 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerStarted","Data":"a443b65ce200f6a13a0367075e2dfb76f0fa7985f33955340034550b3bfdf67e"} Dec 08 00:23:54 crc kubenswrapper[4745]: I1208 00:23:54.034258 4745 scope.go:117] "RemoveContainer" containerID="d68e50e68d0ae8a6e03f26cfdb8cf98d132e8e3ab3e913de8377758729efd13e" Dec 08 00:24:47 crc kubenswrapper[4745]: I1208 00:24:47.403679 4745 generic.go:334] "Generic (PLEG): container finished" podID="5e2df92e-e75d-4de2-b225-43d15f8b64d6" 
containerID="0f50d425c2b91d49d427e148e6e6e8da6f0f9e7975fd54916889fd7ab59203ce" exitCode=0 Dec 08 00:24:47 crc kubenswrapper[4745]: I1208 00:24:47.403765 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"5e2df92e-e75d-4de2-b225-43d15f8b64d6","Type":"ContainerDied","Data":"0f50d425c2b91d49d427e148e6e6e8da6f0f9e7975fd54916889fd7ab59203ce"} Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.697653 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.755117 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-buildworkdir\") pod \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.755198 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-system-configs\") pod \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.755268 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-proxy-ca-bundles\") pod \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.755297 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5e2df92e-e75d-4de2-b225-43d15f8b64d6-node-pullsecrets\") pod \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.755352 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-container-storage-run\") pod \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.755380 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/5e2df92e-e75d-4de2-b225-43d15f8b64d6-builder-dockercfg-vzj4m-pull\") pod \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.755416 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/5e2df92e-e75d-4de2-b225-43d15f8b64d6-builder-dockercfg-vzj4m-push\") pod \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.755444 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-ca-bundles\") pod \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " Dec 08 
00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.755477 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-blob-cache\") pod \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.755511 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-container-storage-root\") pod \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.755545 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttjrx\" (UniqueName: \"kubernetes.io/projected/5e2df92e-e75d-4de2-b225-43d15f8b64d6-kube-api-access-ttjrx\") pod \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.755575 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/5e2df92e-e75d-4de2-b225-43d15f8b64d6-buildcachedir\") pod \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\" (UID: \"5e2df92e-e75d-4de2-b225-43d15f8b64d6\") " Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.756916 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5e2df92e-e75d-4de2-b225-43d15f8b64d6-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "5e2df92e-e75d-4de2-b225-43d15f8b64d6" (UID: "5e2df92e-e75d-4de2-b225-43d15f8b64d6"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.758694 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5e2df92e-e75d-4de2-b225-43d15f8b64d6-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "5e2df92e-e75d-4de2-b225-43d15f8b64d6" (UID: "5e2df92e-e75d-4de2-b225-43d15f8b64d6"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.759337 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "5e2df92e-e75d-4de2-b225-43d15f8b64d6" (UID: "5e2df92e-e75d-4de2-b225-43d15f8b64d6"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.759722 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "5e2df92e-e75d-4de2-b225-43d15f8b64d6" (UID: "5e2df92e-e75d-4de2-b225-43d15f8b64d6"). InnerVolumeSpecName "build-proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.760072 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "5e2df92e-e75d-4de2-b225-43d15f8b64d6" (UID: "5e2df92e-e75d-4de2-b225-43d15f8b64d6"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.764583 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "5e2df92e-e75d-4de2-b225-43d15f8b64d6" (UID: "5e2df92e-e75d-4de2-b225-43d15f8b64d6"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.771142 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e2df92e-e75d-4de2-b225-43d15f8b64d6-builder-dockercfg-vzj4m-push" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-push") pod "5e2df92e-e75d-4de2-b225-43d15f8b64d6" (UID: "5e2df92e-e75d-4de2-b225-43d15f8b64d6"). InnerVolumeSpecName "builder-dockercfg-vzj4m-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.773585 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "5e2df92e-e75d-4de2-b225-43d15f8b64d6" (UID: "5e2df92e-e75d-4de2-b225-43d15f8b64d6"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.775110 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e2df92e-e75d-4de2-b225-43d15f8b64d6-builder-dockercfg-vzj4m-pull" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-pull") pod "5e2df92e-e75d-4de2-b225-43d15f8b64d6" (UID: "5e2df92e-e75d-4de2-b225-43d15f8b64d6"). InnerVolumeSpecName "builder-dockercfg-vzj4m-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.778354 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e2df92e-e75d-4de2-b225-43d15f8b64d6-kube-api-access-ttjrx" (OuterVolumeSpecName: "kube-api-access-ttjrx") pod "5e2df92e-e75d-4de2-b225-43d15f8b64d6" (UID: "5e2df92e-e75d-4de2-b225-43d15f8b64d6"). InnerVolumeSpecName "kube-api-access-ttjrx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.857667 4745 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-system-configs\") on node \"crc\" DevicePath \"\"" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.857696 4745 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.857706 4745 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5e2df92e-e75d-4de2-b225-43d15f8b64d6-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.857713 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-container-storage-run\") on node \"crc\" DevicePath \"\"" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.857724 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/5e2df92e-e75d-4de2-b225-43d15f8b64d6-builder-dockercfg-vzj4m-pull\") on node \"crc\" DevicePath \"\"" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.857733 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/5e2df92e-e75d-4de2-b225-43d15f8b64d6-builder-dockercfg-vzj4m-push\") on node \"crc\" DevicePath \"\"" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.857741 4745 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.857751 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttjrx\" (UniqueName: \"kubernetes.io/projected/5e2df92e-e75d-4de2-b225-43d15f8b64d6-kube-api-access-ttjrx\") on node \"crc\" DevicePath \"\"" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.857759 4745 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/5e2df92e-e75d-4de2-b225-43d15f8b64d6-buildcachedir\") on node \"crc\" DevicePath \"\"" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.857768 4745 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-buildworkdir\") on node \"crc\" DevicePath \"\"" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.944737 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "5e2df92e-e75d-4de2-b225-43d15f8b64d6" (UID: "5e2df92e-e75d-4de2-b225-43d15f8b64d6"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:24:48 crc kubenswrapper[4745]: I1208 00:24:48.958361 4745 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-build-blob-cache\") on node \"crc\" DevicePath \"\"" Dec 08 00:24:49 crc kubenswrapper[4745]: I1208 00:24:49.424276 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"5e2df92e-e75d-4de2-b225-43d15f8b64d6","Type":"ContainerDied","Data":"27b1778fe82900ad6f42bddbd0399d2d9ce5c657b716d0f31308e8abd7146232"} Dec 08 00:24:49 crc kubenswrapper[4745]: I1208 00:24:49.424316 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-2-build" Dec 08 00:24:49 crc kubenswrapper[4745]: I1208 00:24:49.424508 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27b1778fe82900ad6f42bddbd0399d2d9ce5c657b716d0f31308e8abd7146232" Dec 08 00:24:51 crc kubenswrapper[4745]: I1208 00:24:51.459039 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "5e2df92e-e75d-4de2-b225-43d15f8b64d6" (UID: "5e2df92e-e75d-4de2-b225-43d15f8b64d6"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:24:51 crc kubenswrapper[4745]: I1208 00:24:51.494643 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/5e2df92e-e75d-4de2-b225-43d15f8b64d6-container-storage-root\") on node \"crc\" DevicePath \"\"" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.740201 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/sg-core-1-build"] Dec 08 00:24:53 crc kubenswrapper[4745]: E1208 00:24:53.741350 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e2df92e-e75d-4de2-b225-43d15f8b64d6" containerName="git-clone" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.741370 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e2df92e-e75d-4de2-b225-43d15f8b64d6" containerName="git-clone" Dec 08 00:24:53 crc kubenswrapper[4745]: E1208 00:24:53.741383 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e2df92e-e75d-4de2-b225-43d15f8b64d6" containerName="docker-build" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.741390 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e2df92e-e75d-4de2-b225-43d15f8b64d6" containerName="docker-build" Dec 08 00:24:53 crc kubenswrapper[4745]: E1208 00:24:53.741402 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ecee30b-4d20-4604-a896-b79d5f18ba2c" containerName="docker-build" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.741410 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ecee30b-4d20-4604-a896-b79d5f18ba2c" containerName="docker-build" Dec 08 00:24:53 crc kubenswrapper[4745]: E1208 00:24:53.741421 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e2df92e-e75d-4de2-b225-43d15f8b64d6" containerName="manage-dockerfile" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.741429 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e2df92e-e75d-4de2-b225-43d15f8b64d6" containerName="manage-dockerfile" Dec 08 
00:24:53 crc kubenswrapper[4745]: E1208 00:24:53.741446 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ecee30b-4d20-4604-a896-b79d5f18ba2c" containerName="manage-dockerfile" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.741453 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ecee30b-4d20-4604-a896-b79d5f18ba2c" containerName="manage-dockerfile" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.741579 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ecee30b-4d20-4604-a896-b79d5f18ba2c" containerName="docker-build" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.741593 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e2df92e-e75d-4de2-b225-43d15f8b64d6" containerName="docker-build" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.742357 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-1-build" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.744221 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-1-sys-config" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.744564 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-1-global-ca" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.744630 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-vzj4m" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.745735 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-1-ca" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.754596 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-core-1-build"] Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.939832 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-system-configs\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.939876 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-ca-bundles\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.939907 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-proxy-ca-bundles\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.940043 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfrhf\" (UniqueName: \"kubernetes.io/projected/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-kube-api-access-dfrhf\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.940149 4745 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-node-pullsecrets\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.940196 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-container-storage-root\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.940297 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-container-storage-run\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.940336 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-buildworkdir\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.940369 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-blob-cache\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.940438 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-builder-dockercfg-vzj4m-push\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.940586 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-builder-dockercfg-vzj4m-pull\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:53 crc kubenswrapper[4745]: I1208 00:24:53.940655 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-buildcachedir\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.041730 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-builder-dockercfg-vzj4m-pull\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 
00:24:54.041838 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-buildcachedir\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.041905 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-system-configs\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.042015 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-ca-bundles\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.042058 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-buildcachedir\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.042996 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-system-configs\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.043105 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-proxy-ca-bundles\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.043166 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfrhf\" (UniqueName: \"kubernetes.io/projected/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-kube-api-access-dfrhf\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.043221 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-node-pullsecrets\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.043259 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-container-storage-root\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.043298 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: 
\"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-container-storage-run\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.043371 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-buildworkdir\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.043421 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-blob-cache\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.043479 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-builder-dockercfg-vzj4m-push\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.043472 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-node-pullsecrets\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.043799 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-ca-bundles\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.043992 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-container-storage-run\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.044107 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-buildworkdir\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.044189 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-proxy-ca-bundles\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.044258 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-container-storage-root\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " 
pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.044486 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-blob-cache\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.051190 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-builder-dockercfg-vzj4m-push\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.051286 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-builder-dockercfg-vzj4m-pull\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.074111 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfrhf\" (UniqueName: \"kubernetes.io/projected/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-kube-api-access-dfrhf\") pod \"sg-core-1-build\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.354693 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-1-build" Dec 08 00:24:54 crc kubenswrapper[4745]: I1208 00:24:54.552066 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-core-1-build"] Dec 08 00:24:55 crc kubenswrapper[4745]: I1208 00:24:55.469655 4745 generic.go:334] "Generic (PLEG): container finished" podID="78bb3cb2-b352-41f1-aeab-e0d7d9962b70" containerID="435a41e2b9abc95eb24a2ae9b83a3583cc1fdb485cdf1b62400e152dc1f17451" exitCode=0 Dec 08 00:24:55 crc kubenswrapper[4745]: I1208 00:24:55.469773 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"78bb3cb2-b352-41f1-aeab-e0d7d9962b70","Type":"ContainerDied","Data":"435a41e2b9abc95eb24a2ae9b83a3583cc1fdb485cdf1b62400e152dc1f17451"} Dec 08 00:24:55 crc kubenswrapper[4745]: I1208 00:24:55.470074 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"78bb3cb2-b352-41f1-aeab-e0d7d9962b70","Type":"ContainerStarted","Data":"f7ebf1ad8fcce6faeeea14e0d185e86f3dd747ce485f4c5b6c10605e0e458c9f"} Dec 08 00:24:56 crc kubenswrapper[4745]: I1208 00:24:56.483636 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"78bb3cb2-b352-41f1-aeab-e0d7d9962b70","Type":"ContainerStarted","Data":"1e76a01768d17308082a9a9210c6cb5e60c0e93df77b3cc7712cc2f2fb511545"} Dec 08 00:24:56 crc kubenswrapper[4745]: I1208 00:24:56.521565 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/sg-core-1-build" podStartSLOduration=3.521531538 podStartE2EDuration="3.521531538s" podCreationTimestamp="2025-12-08 00:24:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:24:56.513552964 +0000 UTC 
m=+1051.942759304" watchObservedRunningTime="2025-12-08 00:24:56.521531538 +0000 UTC m=+1051.950737838" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.013918 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/sg-core-1-build"] Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.014913 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/sg-core-1-build" podUID="78bb3cb2-b352-41f1-aeab-e0d7d9962b70" containerName="docker-build" containerID="cri-o://1e76a01768d17308082a9a9210c6cb5e60c0e93df77b3cc7712cc2f2fb511545" gracePeriod=30 Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.411653 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-core-1-build_78bb3cb2-b352-41f1-aeab-e0d7d9962b70/docker-build/0.log" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.412471 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-1-build" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.504048 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-container-storage-run\") pod \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.504121 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-buildworkdir\") pod \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.504162 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-ca-bundles\") pod \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.504183 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-node-pullsecrets\") pod \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.504215 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-system-configs\") pod \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.504238 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-container-storage-root\") pod \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.504284 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-blob-cache\") pod \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " Dec 
08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.504317 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfrhf\" (UniqueName: \"kubernetes.io/projected/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-kube-api-access-dfrhf\") pod \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.504359 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-buildcachedir\") pod \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.504394 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-proxy-ca-bundles\") pod \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.504384 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "78bb3cb2-b352-41f1-aeab-e0d7d9962b70" (UID: "78bb3cb2-b352-41f1-aeab-e0d7d9962b70"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.504437 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-builder-dockercfg-vzj4m-push\") pod \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.504635 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-builder-dockercfg-vzj4m-pull\") pod \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\" (UID: \"78bb3cb2-b352-41f1-aeab-e0d7d9962b70\") " Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.504461 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "78bb3cb2-b352-41f1-aeab-e0d7d9962b70" (UID: "78bb3cb2-b352-41f1-aeab-e0d7d9962b70"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.504888 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "78bb3cb2-b352-41f1-aeab-e0d7d9962b70" (UID: "78bb3cb2-b352-41f1-aeab-e0d7d9962b70"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.505345 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "78bb3cb2-b352-41f1-aeab-e0d7d9962b70" (UID: "78bb3cb2-b352-41f1-aeab-e0d7d9962b70"). 
InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.505527 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-container-storage-run\") on node \"crc\" DevicePath \"\"" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.505554 4745 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-buildworkdir\") on node \"crc\" DevicePath \"\"" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.505572 4745 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.505587 4745 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-buildcachedir\") on node \"crc\" DevicePath \"\"" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.505815 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "78bb3cb2-b352-41f1-aeab-e0d7d9962b70" (UID: "78bb3cb2-b352-41f1-aeab-e0d7d9962b70"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.506380 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "78bb3cb2-b352-41f1-aeab-e0d7d9962b70" (UID: "78bb3cb2-b352-41f1-aeab-e0d7d9962b70"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.509244 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "78bb3cb2-b352-41f1-aeab-e0d7d9962b70" (UID: "78bb3cb2-b352-41f1-aeab-e0d7d9962b70"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.511514 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-builder-dockercfg-vzj4m-push" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-push") pod "78bb3cb2-b352-41f1-aeab-e0d7d9962b70" (UID: "78bb3cb2-b352-41f1-aeab-e0d7d9962b70"). InnerVolumeSpecName "builder-dockercfg-vzj4m-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.511724 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-kube-api-access-dfrhf" (OuterVolumeSpecName: "kube-api-access-dfrhf") pod "78bb3cb2-b352-41f1-aeab-e0d7d9962b70" (UID: "78bb3cb2-b352-41f1-aeab-e0d7d9962b70"). InnerVolumeSpecName "kube-api-access-dfrhf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.512534 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-builder-dockercfg-vzj4m-pull" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-pull") pod "78bb3cb2-b352-41f1-aeab-e0d7d9962b70" (UID: "78bb3cb2-b352-41f1-aeab-e0d7d9962b70"). InnerVolumeSpecName "builder-dockercfg-vzj4m-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.543800 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-core-1-build_78bb3cb2-b352-41f1-aeab-e0d7d9962b70/docker-build/0.log" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.544182 4745 generic.go:334] "Generic (PLEG): container finished" podID="78bb3cb2-b352-41f1-aeab-e0d7d9962b70" containerID="1e76a01768d17308082a9a9210c6cb5e60c0e93df77b3cc7712cc2f2fb511545" exitCode=1 Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.544237 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-1-build" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.544225 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"78bb3cb2-b352-41f1-aeab-e0d7d9962b70","Type":"ContainerDied","Data":"1e76a01768d17308082a9a9210c6cb5e60c0e93df77b3cc7712cc2f2fb511545"} Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.544281 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"78bb3cb2-b352-41f1-aeab-e0d7d9962b70","Type":"ContainerDied","Data":"f7ebf1ad8fcce6faeeea14e0d185e86f3dd747ce485f4c5b6c10605e0e458c9f"} Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.544304 4745 scope.go:117] "RemoveContainer" containerID="1e76a01768d17308082a9a9210c6cb5e60c0e93df77b3cc7712cc2f2fb511545" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.589957 4745 scope.go:117] "RemoveContainer" containerID="435a41e2b9abc95eb24a2ae9b83a3583cc1fdb485cdf1b62400e152dc1f17451" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.607345 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfrhf\" (UniqueName: \"kubernetes.io/projected/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-kube-api-access-dfrhf\") on node \"crc\" DevicePath \"\"" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.607384 4745 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.607398 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-builder-dockercfg-vzj4m-push\") on node \"crc\" DevicePath \"\"" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.607410 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-builder-dockercfg-vzj4m-pull\") on node \"crc\" DevicePath \"\"" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.607425 4745 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.607437 4745 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-system-configs\") on node \"crc\" DevicePath \"\"" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.608749 4745 scope.go:117] "RemoveContainer" containerID="1e76a01768d17308082a9a9210c6cb5e60c0e93df77b3cc7712cc2f2fb511545" Dec 08 00:25:04 crc kubenswrapper[4745]: E1208 00:25:04.609224 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e76a01768d17308082a9a9210c6cb5e60c0e93df77b3cc7712cc2f2fb511545\": container with ID starting with 1e76a01768d17308082a9a9210c6cb5e60c0e93df77b3cc7712cc2f2fb511545 not found: ID does not exist" containerID="1e76a01768d17308082a9a9210c6cb5e60c0e93df77b3cc7712cc2f2fb511545" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.609256 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e76a01768d17308082a9a9210c6cb5e60c0e93df77b3cc7712cc2f2fb511545"} err="failed to get container status \"1e76a01768d17308082a9a9210c6cb5e60c0e93df77b3cc7712cc2f2fb511545\": rpc error: code = NotFound desc = could not find container \"1e76a01768d17308082a9a9210c6cb5e60c0e93df77b3cc7712cc2f2fb511545\": container with ID starting with 1e76a01768d17308082a9a9210c6cb5e60c0e93df77b3cc7712cc2f2fb511545 not found: ID does not exist" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.609280 4745 scope.go:117] "RemoveContainer" containerID="435a41e2b9abc95eb24a2ae9b83a3583cc1fdb485cdf1b62400e152dc1f17451" Dec 08 00:25:04 crc kubenswrapper[4745]: E1208 00:25:04.609826 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"435a41e2b9abc95eb24a2ae9b83a3583cc1fdb485cdf1b62400e152dc1f17451\": container with ID starting with 435a41e2b9abc95eb24a2ae9b83a3583cc1fdb485cdf1b62400e152dc1f17451 not found: ID does not exist" containerID="435a41e2b9abc95eb24a2ae9b83a3583cc1fdb485cdf1b62400e152dc1f17451" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.609873 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"435a41e2b9abc95eb24a2ae9b83a3583cc1fdb485cdf1b62400e152dc1f17451"} err="failed to get container status \"435a41e2b9abc95eb24a2ae9b83a3583cc1fdb485cdf1b62400e152dc1f17451\": rpc error: code = NotFound desc = could not find container \"435a41e2b9abc95eb24a2ae9b83a3583cc1fdb485cdf1b62400e152dc1f17451\": container with ID starting with 435a41e2b9abc95eb24a2ae9b83a3583cc1fdb485cdf1b62400e152dc1f17451 not found: ID does not exist" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.610326 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "78bb3cb2-b352-41f1-aeab-e0d7d9962b70" (UID: "78bb3cb2-b352-41f1-aeab-e0d7d9962b70"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.630047 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "78bb3cb2-b352-41f1-aeab-e0d7d9962b70" (UID: "78bb3cb2-b352-41f1-aeab-e0d7d9962b70"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.708537 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-container-storage-root\") on node \"crc\" DevicePath \"\"" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.708591 4745 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/78bb3cb2-b352-41f1-aeab-e0d7d9962b70-build-blob-cache\") on node \"crc\" DevicePath \"\"" Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.912366 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/sg-core-1-build"] Dec 08 00:25:04 crc kubenswrapper[4745]: I1208 00:25:04.912721 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/sg-core-1-build"] Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.596803 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/sg-core-2-build"] Dec 08 00:25:05 crc kubenswrapper[4745]: E1208 00:25:05.597076 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78bb3cb2-b352-41f1-aeab-e0d7d9962b70" containerName="docker-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.597093 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="78bb3cb2-b352-41f1-aeab-e0d7d9962b70" containerName="docker-build" Dec 08 00:25:05 crc kubenswrapper[4745]: E1208 00:25:05.597106 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78bb3cb2-b352-41f1-aeab-e0d7d9962b70" containerName="manage-dockerfile" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.597114 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="78bb3cb2-b352-41f1-aeab-e0d7d9962b70" containerName="manage-dockerfile" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.597278 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="78bb3cb2-b352-41f1-aeab-e0d7d9962b70" containerName="docker-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.598328 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.601211 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-2-ca" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.601245 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-2-global-ca" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.601452 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-2-sys-config" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.601621 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-vzj4m" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.621323 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-core-2-build"] Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.624192 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-build-blob-cache\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.624261 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/a00143f7-7638-4d30-a513-a96464e25a18-builder-dockercfg-vzj4m-push\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.624316 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-ca-bundles\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.624399 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/a00143f7-7638-4d30-a513-a96464e25a18-buildcachedir\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.624528 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-system-configs\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.624569 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56b52\" (UniqueName: \"kubernetes.io/projected/a00143f7-7638-4d30-a513-a96464e25a18-kube-api-access-56b52\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.624588 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: 
\"kubernetes.io/secret/a00143f7-7638-4d30-a513-a96464e25a18-builder-dockercfg-vzj4m-pull\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.624612 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a00143f7-7638-4d30-a513-a96464e25a18-node-pullsecrets\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.624628 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-container-storage-root\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.624698 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-container-storage-run\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.624801 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-buildworkdir\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.624849 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-proxy-ca-bundles\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.725552 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-build-blob-cache\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.725647 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/a00143f7-7638-4d30-a513-a96464e25a18-builder-dockercfg-vzj4m-push\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.725697 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-ca-bundles\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.725765 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: 
\"kubernetes.io/host-path/a00143f7-7638-4d30-a513-a96464e25a18-buildcachedir\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.725840 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-system-configs\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.725893 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/a00143f7-7638-4d30-a513-a96464e25a18-builder-dockercfg-vzj4m-pull\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.726045 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56b52\" (UniqueName: \"kubernetes.io/projected/a00143f7-7638-4d30-a513-a96464e25a18-kube-api-access-56b52\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.726149 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/a00143f7-7638-4d30-a513-a96464e25a18-buildcachedir\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.726369 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a00143f7-7638-4d30-a513-a96464e25a18-node-pullsecrets\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.726468 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-container-storage-root\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.726570 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-container-storage-run\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.726627 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-buildworkdir\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.726677 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-proxy-ca-bundles\") pod \"sg-core-2-build\" (UID: 
\"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.726413 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-build-blob-cache\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.727438 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a00143f7-7638-4d30-a513-a96464e25a18-node-pullsecrets\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.727696 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-system-configs\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.727980 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-container-storage-run\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.728048 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-buildworkdir\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.728211 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-proxy-ca-bundles\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.728300 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-container-storage-root\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.728332 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-ca-bundles\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.737743 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/a00143f7-7638-4d30-a513-a96464e25a18-builder-dockercfg-vzj4m-pull\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.737760 4745 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/a00143f7-7638-4d30-a513-a96464e25a18-builder-dockercfg-vzj4m-push\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.746150 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56b52\" (UniqueName: \"kubernetes.io/projected/a00143f7-7638-4d30-a513-a96464e25a18-kube-api-access-56b52\") pod \"sg-core-2-build\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " pod="service-telemetry/sg-core-2-build" Dec 08 00:25:05 crc kubenswrapper[4745]: I1208 00:25:05.939889 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-2-build" Dec 08 00:25:06 crc kubenswrapper[4745]: I1208 00:25:06.364167 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-core-2-build"] Dec 08 00:25:06 crc kubenswrapper[4745]: I1208 00:25:06.565293 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"a00143f7-7638-4d30-a513-a96464e25a18","Type":"ContainerStarted","Data":"61a378c319d5e7975a833d22677d675e8e3b30dba15c1448fe625b5764651832"} Dec 08 00:25:06 crc kubenswrapper[4745]: I1208 00:25:06.896884 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78bb3cb2-b352-41f1-aeab-e0d7d9962b70" path="/var/lib/kubelet/pods/78bb3cb2-b352-41f1-aeab-e0d7d9962b70/volumes" Dec 08 00:25:07 crc kubenswrapper[4745]: I1208 00:25:07.576339 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"a00143f7-7638-4d30-a513-a96464e25a18","Type":"ContainerStarted","Data":"3162f8486ece5d7966c01955af4ab23bbc84404063e5145bfca5b52facee9e71"} Dec 08 00:25:08 crc kubenswrapper[4745]: I1208 00:25:08.586350 4745 generic.go:334] "Generic (PLEG): container finished" podID="a00143f7-7638-4d30-a513-a96464e25a18" containerID="3162f8486ece5d7966c01955af4ab23bbc84404063e5145bfca5b52facee9e71" exitCode=0 Dec 08 00:25:08 crc kubenswrapper[4745]: I1208 00:25:08.586432 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"a00143f7-7638-4d30-a513-a96464e25a18","Type":"ContainerDied","Data":"3162f8486ece5d7966c01955af4ab23bbc84404063e5145bfca5b52facee9e71"} Dec 08 00:25:09 crc kubenswrapper[4745]: E1208 00:25:09.140028 4745 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda00143f7_7638_4d30_a513_a96464e25a18.slice/crio-a2014bfbeb8dfadc6974e5e4f2bc7b5e4631840f11f1d3335f0ae7782c0dc478.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda00143f7_7638_4d30_a513_a96464e25a18.slice/crio-conmon-a2014bfbeb8dfadc6974e5e4f2bc7b5e4631840f11f1d3335f0ae7782c0dc478.scope\": RecentStats: unable to find data in memory cache]" Dec 08 00:25:09 crc kubenswrapper[4745]: I1208 00:25:09.604109 4745 generic.go:334] "Generic (PLEG): container finished" podID="a00143f7-7638-4d30-a513-a96464e25a18" containerID="a2014bfbeb8dfadc6974e5e4f2bc7b5e4631840f11f1d3335f0ae7782c0dc478" exitCode=0 Dec 08 00:25:09 crc kubenswrapper[4745]: I1208 00:25:09.604157 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" 
event={"ID":"a00143f7-7638-4d30-a513-a96464e25a18","Type":"ContainerDied","Data":"a2014bfbeb8dfadc6974e5e4f2bc7b5e4631840f11f1d3335f0ae7782c0dc478"} Dec 08 00:25:09 crc kubenswrapper[4745]: I1208 00:25:09.639178 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-core-2-build_a00143f7-7638-4d30-a513-a96464e25a18/manage-dockerfile/0.log" Dec 08 00:25:10 crc kubenswrapper[4745]: I1208 00:25:10.614977 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"a00143f7-7638-4d30-a513-a96464e25a18","Type":"ContainerStarted","Data":"d33bd4eb0c27e0c0003b5bfdc3e28293c887588636c8356fb8f8adf75f239f42"} Dec 08 00:25:10 crc kubenswrapper[4745]: I1208 00:25:10.649419 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/sg-core-2-build" podStartSLOduration=5.649401944 podStartE2EDuration="5.649401944s" podCreationTimestamp="2025-12-08 00:25:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:25:10.647351621 +0000 UTC m=+1066.076557931" watchObservedRunningTime="2025-12-08 00:25:10.649401944 +0000 UTC m=+1066.078608244" Dec 08 00:26:22 crc kubenswrapper[4745]: I1208 00:26:22.460881 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:26:22 crc kubenswrapper[4745]: I1208 00:26:22.461500 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:26:52 crc kubenswrapper[4745]: I1208 00:26:52.460250 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:26:52 crc kubenswrapper[4745]: I1208 00:26:52.460921 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:27:22 crc kubenswrapper[4745]: I1208 00:27:22.460411 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:27:22 crc kubenswrapper[4745]: I1208 00:27:22.461324 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:27:22 crc kubenswrapper[4745]: I1208 00:27:22.461399 
4745 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:27:22 crc kubenswrapper[4745]: I1208 00:27:22.462471 4745 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a443b65ce200f6a13a0367075e2dfb76f0fa7985f33955340034550b3bfdf67e"} pod="openshift-machine-config-operator/machine-config-daemon-6czdv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 00:27:22 crc kubenswrapper[4745]: I1208 00:27:22.462573 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" containerID="cri-o://a443b65ce200f6a13a0367075e2dfb76f0fa7985f33955340034550b3bfdf67e" gracePeriod=600 Dec 08 00:27:23 crc kubenswrapper[4745]: I1208 00:27:23.559208 4745 generic.go:334] "Generic (PLEG): container finished" podID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerID="a443b65ce200f6a13a0367075e2dfb76f0fa7985f33955340034550b3bfdf67e" exitCode=0 Dec 08 00:27:23 crc kubenswrapper[4745]: I1208 00:27:23.559489 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerDied","Data":"a443b65ce200f6a13a0367075e2dfb76f0fa7985f33955340034550b3bfdf67e"} Dec 08 00:27:23 crc kubenswrapper[4745]: I1208 00:27:23.560072 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerStarted","Data":"aa3944eefb5c403e042888407a760b9ae69ac970a839ac450c44f0d8351dbb2c"} Dec 08 00:27:23 crc kubenswrapper[4745]: I1208 00:27:23.560102 4745 scope.go:117] "RemoveContainer" containerID="a0b54804879b59a5315813b6e61fe2985b6017fa236833a33d571f68aadbd8c5" Dec 08 00:28:28 crc kubenswrapper[4745]: I1208 00:28:28.284227 4745 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-q5k5x container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 08 00:28:28 crc kubenswrapper[4745]: I1208 00:28:28.285016 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-q5k5x" podUID="64fa4ddd-3dbd-4910-b8f8-dba1bb97b963" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 08 00:28:46 crc kubenswrapper[4745]: I1208 00:28:46.159078 4745 generic.go:334] "Generic (PLEG): container finished" podID="a00143f7-7638-4d30-a513-a96464e25a18" containerID="d33bd4eb0c27e0c0003b5bfdc3e28293c887588636c8356fb8f8adf75f239f42" exitCode=0 Dec 08 00:28:46 crc kubenswrapper[4745]: I1208 00:28:46.161496 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"a00143f7-7638-4d30-a513-a96464e25a18","Type":"ContainerDied","Data":"d33bd4eb0c27e0c0003b5bfdc3e28293c887588636c8356fb8f8adf75f239f42"} Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.514118 4745 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-2-build" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.624335 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/a00143f7-7638-4d30-a513-a96464e25a18-builder-dockercfg-vzj4m-pull\") pod \"a00143f7-7638-4d30-a513-a96464e25a18\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.624432 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56b52\" (UniqueName: \"kubernetes.io/projected/a00143f7-7638-4d30-a513-a96464e25a18-kube-api-access-56b52\") pod \"a00143f7-7638-4d30-a513-a96464e25a18\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.624459 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-system-configs\") pod \"a00143f7-7638-4d30-a513-a96464e25a18\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625091 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-proxy-ca-bundles\") pod \"a00143f7-7638-4d30-a513-a96464e25a18\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625140 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/a00143f7-7638-4d30-a513-a96464e25a18-buildcachedir\") pod \"a00143f7-7638-4d30-a513-a96464e25a18\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625181 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-container-storage-run\") pod \"a00143f7-7638-4d30-a513-a96464e25a18\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625186 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "a00143f7-7638-4d30-a513-a96464e25a18" (UID: "a00143f7-7638-4d30-a513-a96464e25a18"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625222 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a00143f7-7638-4d30-a513-a96464e25a18-node-pullsecrets\") pod \"a00143f7-7638-4d30-a513-a96464e25a18\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625231 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a00143f7-7638-4d30-a513-a96464e25a18-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "a00143f7-7638-4d30-a513-a96464e25a18" (UID: "a00143f7-7638-4d30-a513-a96464e25a18"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625267 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-build-blob-cache\") pod \"a00143f7-7638-4d30-a513-a96464e25a18\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625313 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-ca-bundles\") pod \"a00143f7-7638-4d30-a513-a96464e25a18\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625346 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-buildworkdir\") pod \"a00143f7-7638-4d30-a513-a96464e25a18\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625383 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/a00143f7-7638-4d30-a513-a96464e25a18-builder-dockercfg-vzj4m-push\") pod \"a00143f7-7638-4d30-a513-a96464e25a18\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625428 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-container-storage-root\") pod \"a00143f7-7638-4d30-a513-a96464e25a18\" (UID: \"a00143f7-7638-4d30-a513-a96464e25a18\") " Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625410 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a00143f7-7638-4d30-a513-a96464e25a18-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "a00143f7-7638-4d30-a513-a96464e25a18" (UID: "a00143f7-7638-4d30-a513-a96464e25a18"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625684 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "a00143f7-7638-4d30-a513-a96464e25a18" (UID: "a00143f7-7638-4d30-a513-a96464e25a18"). InnerVolumeSpecName "build-proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625793 4745 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-system-configs\") on node \"crc\" DevicePath \"\"" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625946 4745 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625962 4745 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/a00143f7-7638-4d30-a513-a96464e25a18-buildcachedir\") on node \"crc\" DevicePath \"\"" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.625974 4745 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a00143f7-7638-4d30-a513-a96464e25a18-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.626621 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "a00143f7-7638-4d30-a513-a96464e25a18" (UID: "a00143f7-7638-4d30-a513-a96464e25a18"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.626747 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "a00143f7-7638-4d30-a513-a96464e25a18" (UID: "a00143f7-7638-4d30-a513-a96464e25a18"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.631674 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a00143f7-7638-4d30-a513-a96464e25a18-builder-dockercfg-vzj4m-pull" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-pull") pod "a00143f7-7638-4d30-a513-a96464e25a18" (UID: "a00143f7-7638-4d30-a513-a96464e25a18"). InnerVolumeSpecName "builder-dockercfg-vzj4m-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.632706 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a00143f7-7638-4d30-a513-a96464e25a18-kube-api-access-56b52" (OuterVolumeSpecName: "kube-api-access-56b52") pod "a00143f7-7638-4d30-a513-a96464e25a18" (UID: "a00143f7-7638-4d30-a513-a96464e25a18"). InnerVolumeSpecName "kube-api-access-56b52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.633637 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a00143f7-7638-4d30-a513-a96464e25a18-builder-dockercfg-vzj4m-push" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-push") pod "a00143f7-7638-4d30-a513-a96464e25a18" (UID: "a00143f7-7638-4d30-a513-a96464e25a18"). InnerVolumeSpecName "builder-dockercfg-vzj4m-push". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.648072 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "a00143f7-7638-4d30-a513-a96464e25a18" (UID: "a00143f7-7638-4d30-a513-a96464e25a18"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.727547 4745 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a00143f7-7638-4d30-a513-a96464e25a18-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.727600 4745 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-buildworkdir\") on node \"crc\" DevicePath \"\"" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.727618 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/a00143f7-7638-4d30-a513-a96464e25a18-builder-dockercfg-vzj4m-push\") on node \"crc\" DevicePath \"\"" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.727639 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/a00143f7-7638-4d30-a513-a96464e25a18-builder-dockercfg-vzj4m-pull\") on node \"crc\" DevicePath \"\"" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.727660 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56b52\" (UniqueName: \"kubernetes.io/projected/a00143f7-7638-4d30-a513-a96464e25a18-kube-api-access-56b52\") on node \"crc\" DevicePath \"\"" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.727676 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-container-storage-run\") on node \"crc\" DevicePath \"\"" Dec 08 00:28:47 crc kubenswrapper[4745]: I1208 00:28:47.971536 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "a00143f7-7638-4d30-a513-a96464e25a18" (UID: "a00143f7-7638-4d30-a513-a96464e25a18"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:28:48 crc kubenswrapper[4745]: I1208 00:28:48.033219 4745 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-build-blob-cache\") on node \"crc\" DevicePath \"\"" Dec 08 00:28:48 crc kubenswrapper[4745]: I1208 00:28:48.180454 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"a00143f7-7638-4d30-a513-a96464e25a18","Type":"ContainerDied","Data":"61a378c319d5e7975a833d22677d675e8e3b30dba15c1448fe625b5764651832"} Dec 08 00:28:48 crc kubenswrapper[4745]: I1208 00:28:48.180812 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61a378c319d5e7975a833d22677d675e8e3b30dba15c1448fe625b5764651832" Dec 08 00:28:48 crc kubenswrapper[4745]: I1208 00:28:48.180547 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-core-2-build" Dec 08 00:28:50 crc kubenswrapper[4745]: I1208 00:28:50.886655 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "a00143f7-7638-4d30-a513-a96464e25a18" (UID: "a00143f7-7638-4d30-a513-a96464e25a18"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:28:50 crc kubenswrapper[4745]: I1208 00:28:50.977228 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/a00143f7-7638-4d30-a513-a96464e25a18-container-storage-root\") on node \"crc\" DevicePath \"\"" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.479160 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/sg-bridge-1-build"] Dec 08 00:28:52 crc kubenswrapper[4745]: E1208 00:28:52.480028 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a00143f7-7638-4d30-a513-a96464e25a18" containerName="docker-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.480049 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="a00143f7-7638-4d30-a513-a96464e25a18" containerName="docker-build" Dec 08 00:28:52 crc kubenswrapper[4745]: E1208 00:28:52.480069 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a00143f7-7638-4d30-a513-a96464e25a18" containerName="git-clone" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.480078 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="a00143f7-7638-4d30-a513-a96464e25a18" containerName="git-clone" Dec 08 00:28:52 crc kubenswrapper[4745]: E1208 00:28:52.480105 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a00143f7-7638-4d30-a513-a96464e25a18" containerName="manage-dockerfile" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.480182 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="a00143f7-7638-4d30-a513-a96464e25a18" containerName="manage-dockerfile" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.480479 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="a00143f7-7638-4d30-a513-a96464e25a18" containerName="docker-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.481776 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.496650 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.504553 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/c41f19c8-fc0a-4228-95a6-315c311f612a-buildcachedir\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.504607 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-build-blob-cache\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.504652 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c41f19c8-fc0a-4228-95a6-315c311f612a-node-pullsecrets\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.504682 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-container-storage-root\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.504714 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-proxy-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.504744 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ldlp\" (UniqueName: \"kubernetes.io/projected/c41f19c8-fc0a-4228-95a6-315c311f612a-kube-api-access-9ldlp\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.504768 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.504788 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/c41f19c8-fc0a-4228-95a6-315c311f612a-builder-dockercfg-vzj4m-pull\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.504812 4745 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-system-configs\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.504839 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-container-storage-run\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.504862 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-buildworkdir\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.504891 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/c41f19c8-fc0a-4228-95a6-315c311f612a-builder-dockercfg-vzj4m-push\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.512521 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-vzj4m" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.513451 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-1-sys-config" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.513633 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-1-ca" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.513787 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-1-global-ca" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.605844 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-proxy-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.605902 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ldlp\" (UniqueName: \"kubernetes.io/projected/c41f19c8-fc0a-4228-95a6-315c311f612a-kube-api-access-9ldlp\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.605934 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.605957 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/c41f19c8-fc0a-4228-95a6-315c311f612a-builder-dockercfg-vzj4m-pull\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.605978 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-system-configs\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.605996 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-container-storage-run\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.606013 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-buildworkdir\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.606033 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/c41f19c8-fc0a-4228-95a6-315c311f612a-builder-dockercfg-vzj4m-push\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.606067 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/c41f19c8-fc0a-4228-95a6-315c311f612a-buildcachedir\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.606083 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-build-blob-cache\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.606110 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c41f19c8-fc0a-4228-95a6-315c311f612a-node-pullsecrets\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.606137 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-container-storage-root\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.606500 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: 
\"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-container-storage-root\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.606675 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-proxy-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.606897 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-buildworkdir\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.607462 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.607699 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-build-blob-cache\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.607858 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c41f19c8-fc0a-4228-95a6-315c311f612a-node-pullsecrets\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.607910 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/c41f19c8-fc0a-4228-95a6-315c311f612a-buildcachedir\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.607912 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-system-configs\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.608089 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-container-storage-run\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.612222 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/c41f19c8-fc0a-4228-95a6-315c311f612a-builder-dockercfg-vzj4m-push\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " 
pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.622607 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/c41f19c8-fc0a-4228-95a6-315c311f612a-builder-dockercfg-vzj4m-pull\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.622848 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ldlp\" (UniqueName: \"kubernetes.io/projected/c41f19c8-fc0a-4228-95a6-315c311f612a-kube-api-access-9ldlp\") pod \"sg-bridge-1-build\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:52 crc kubenswrapper[4745]: I1208 00:28:52.820091 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-1-build" Dec 08 00:28:53 crc kubenswrapper[4745]: I1208 00:28:53.052597 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Dec 08 00:28:53 crc kubenswrapper[4745]: I1208 00:28:53.214788 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"c41f19c8-fc0a-4228-95a6-315c311f612a","Type":"ContainerStarted","Data":"296c13a552663443f22b18a4e3cd9785fe257d128881abfd8753afb5ef4c7604"} Dec 08 00:28:54 crc kubenswrapper[4745]: I1208 00:28:54.223697 4745 generic.go:334] "Generic (PLEG): container finished" podID="c41f19c8-fc0a-4228-95a6-315c311f612a" containerID="b1f5fba810ed191d0edf37d2ca32f587213063dec5b61893a1939b7b4403502e" exitCode=0 Dec 08 00:28:54 crc kubenswrapper[4745]: I1208 00:28:54.223771 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"c41f19c8-fc0a-4228-95a6-315c311f612a","Type":"ContainerDied","Data":"b1f5fba810ed191d0edf37d2ca32f587213063dec5b61893a1939b7b4403502e"} Dec 08 00:28:55 crc kubenswrapper[4745]: I1208 00:28:55.234656 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"c41f19c8-fc0a-4228-95a6-315c311f612a","Type":"ContainerStarted","Data":"6b642b3cf2c8ac9e81e9408d830a90eef0d195d915aa10a615b55bcafc78d497"} Dec 08 00:28:55 crc kubenswrapper[4745]: I1208 00:28:55.271837 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/sg-bridge-1-build" podStartSLOduration=3.2718178 podStartE2EDuration="3.2718178s" podCreationTimestamp="2025-12-08 00:28:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:28:55.263540618 +0000 UTC m=+1290.692746918" watchObservedRunningTime="2025-12-08 00:28:55.2718178 +0000 UTC m=+1290.701024120" Dec 08 00:29:02 crc kubenswrapper[4745]: E1208 00:29:02.605879 4745 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc41f19c8_fc0a_4228_95a6_315c311f612a.slice/crio-conmon-6b642b3cf2c8ac9e81e9408d830a90eef0d195d915aa10a615b55bcafc78d497.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc41f19c8_fc0a_4228_95a6_315c311f612a.slice/crio-6b642b3cf2c8ac9e81e9408d830a90eef0d195d915aa10a615b55bcafc78d497.scope\": RecentStats: unable to find data in 
memory cache]" Dec 08 00:29:02 crc kubenswrapper[4745]: I1208 00:29:02.865067 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Dec 08 00:29:02 crc kubenswrapper[4745]: I1208 00:29:02.865431 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/sg-bridge-1-build" podUID="c41f19c8-fc0a-4228-95a6-315c311f612a" containerName="docker-build" containerID="cri-o://6b642b3cf2c8ac9e81e9408d830a90eef0d195d915aa10a615b55bcafc78d497" gracePeriod=30 Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.238651 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-1-build_c41f19c8-fc0a-4228-95a6-315c311f612a/docker-build/0.log" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.239323 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-1-build" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.297018 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-1-build_c41f19c8-fc0a-4228-95a6-315c311f612a/docker-build/0.log" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.297630 4745 generic.go:334] "Generic (PLEG): container finished" podID="c41f19c8-fc0a-4228-95a6-315c311f612a" containerID="6b642b3cf2c8ac9e81e9408d830a90eef0d195d915aa10a615b55bcafc78d497" exitCode=1 Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.297675 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"c41f19c8-fc0a-4228-95a6-315c311f612a","Type":"ContainerDied","Data":"6b642b3cf2c8ac9e81e9408d830a90eef0d195d915aa10a615b55bcafc78d497"} Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.297702 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"c41f19c8-fc0a-4228-95a6-315c311f612a","Type":"ContainerDied","Data":"296c13a552663443f22b18a4e3cd9785fe257d128881abfd8753afb5ef4c7604"} Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.297724 4745 scope.go:117] "RemoveContainer" containerID="6b642b3cf2c8ac9e81e9408d830a90eef0d195d915aa10a615b55bcafc78d497" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.297991 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-bridge-1-build" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.315843 4745 scope.go:117] "RemoveContainer" containerID="b1f5fba810ed191d0edf37d2ca32f587213063dec5b61893a1939b7b4403502e" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.326332 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/c41f19c8-fc0a-4228-95a6-315c311f612a-builder-dockercfg-vzj4m-push\") pod \"c41f19c8-fc0a-4228-95a6-315c311f612a\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.327622 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-ca-bundles\") pod \"c41f19c8-fc0a-4228-95a6-315c311f612a\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.327794 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-container-storage-run\") pod \"c41f19c8-fc0a-4228-95a6-315c311f612a\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.327981 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/c41f19c8-fc0a-4228-95a6-315c311f612a-buildcachedir\") pod \"c41f19c8-fc0a-4228-95a6-315c311f612a\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.328096 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/c41f19c8-fc0a-4228-95a6-315c311f612a-builder-dockercfg-vzj4m-pull\") pod \"c41f19c8-fc0a-4228-95a6-315c311f612a\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.328193 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-buildworkdir\") pod \"c41f19c8-fc0a-4228-95a6-315c311f612a\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.328322 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-container-storage-root\") pod \"c41f19c8-fc0a-4228-95a6-315c311f612a\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.328450 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-system-configs\") pod \"c41f19c8-fc0a-4228-95a6-315c311f612a\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.328593 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c41f19c8-fc0a-4228-95a6-315c311f612a-node-pullsecrets\") pod \"c41f19c8-fc0a-4228-95a6-315c311f612a\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " Dec 08 00:29:03 crc 
kubenswrapper[4745]: I1208 00:29:03.328704 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-proxy-ca-bundles\") pod \"c41f19c8-fc0a-4228-95a6-315c311f612a\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.328839 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ldlp\" (UniqueName: \"kubernetes.io/projected/c41f19c8-fc0a-4228-95a6-315c311f612a-kube-api-access-9ldlp\") pod \"c41f19c8-fc0a-4228-95a6-315c311f612a\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.328973 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-build-blob-cache\") pod \"c41f19c8-fc0a-4228-95a6-315c311f612a\" (UID: \"c41f19c8-fc0a-4228-95a6-315c311f612a\") " Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.328319 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "c41f19c8-fc0a-4228-95a6-315c311f612a" (UID: "c41f19c8-fc0a-4228-95a6-315c311f612a"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.328348 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c41f19c8-fc0a-4228-95a6-315c311f612a-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "c41f19c8-fc0a-4228-95a6-315c311f612a" (UID: "c41f19c8-fc0a-4228-95a6-315c311f612a"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.329176 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "c41f19c8-fc0a-4228-95a6-315c311f612a" (UID: "c41f19c8-fc0a-4228-95a6-315c311f612a"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.330560 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "c41f19c8-fc0a-4228-95a6-315c311f612a" (UID: "c41f19c8-fc0a-4228-95a6-315c311f612a"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.330597 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c41f19c8-fc0a-4228-95a6-315c311f612a-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "c41f19c8-fc0a-4228-95a6-315c311f612a" (UID: "c41f19c8-fc0a-4228-95a6-315c311f612a"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.330956 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "c41f19c8-fc0a-4228-95a6-315c311f612a" (UID: "c41f19c8-fc0a-4228-95a6-315c311f612a"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.331312 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "c41f19c8-fc0a-4228-95a6-315c311f612a" (UID: "c41f19c8-fc0a-4228-95a6-315c311f612a"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.333141 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c41f19c8-fc0a-4228-95a6-315c311f612a-builder-dockercfg-vzj4m-push" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-push") pod "c41f19c8-fc0a-4228-95a6-315c311f612a" (UID: "c41f19c8-fc0a-4228-95a6-315c311f612a"). InnerVolumeSpecName "builder-dockercfg-vzj4m-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.334163 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c41f19c8-fc0a-4228-95a6-315c311f612a-builder-dockercfg-vzj4m-pull" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-pull") pod "c41f19c8-fc0a-4228-95a6-315c311f612a" (UID: "c41f19c8-fc0a-4228-95a6-315c311f612a"). InnerVolumeSpecName "builder-dockercfg-vzj4m-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.334295 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c41f19c8-fc0a-4228-95a6-315c311f612a-kube-api-access-9ldlp" (OuterVolumeSpecName: "kube-api-access-9ldlp") pod "c41f19c8-fc0a-4228-95a6-315c311f612a" (UID: "c41f19c8-fc0a-4228-95a6-315c311f612a"). InnerVolumeSpecName "kube-api-access-9ldlp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.337133 4745 scope.go:117] "RemoveContainer" containerID="6b642b3cf2c8ac9e81e9408d830a90eef0d195d915aa10a615b55bcafc78d497" Dec 08 00:29:03 crc kubenswrapper[4745]: E1208 00:29:03.337556 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b642b3cf2c8ac9e81e9408d830a90eef0d195d915aa10a615b55bcafc78d497\": container with ID starting with 6b642b3cf2c8ac9e81e9408d830a90eef0d195d915aa10a615b55bcafc78d497 not found: ID does not exist" containerID="6b642b3cf2c8ac9e81e9408d830a90eef0d195d915aa10a615b55bcafc78d497" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.337591 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b642b3cf2c8ac9e81e9408d830a90eef0d195d915aa10a615b55bcafc78d497"} err="failed to get container status \"6b642b3cf2c8ac9e81e9408d830a90eef0d195d915aa10a615b55bcafc78d497\": rpc error: code = NotFound desc = could not find container \"6b642b3cf2c8ac9e81e9408d830a90eef0d195d915aa10a615b55bcafc78d497\": container with ID starting with 6b642b3cf2c8ac9e81e9408d830a90eef0d195d915aa10a615b55bcafc78d497 not found: ID does not exist" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.337636 4745 scope.go:117] "RemoveContainer" containerID="b1f5fba810ed191d0edf37d2ca32f587213063dec5b61893a1939b7b4403502e" Dec 08 00:29:03 crc kubenswrapper[4745]: E1208 00:29:03.337918 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1f5fba810ed191d0edf37d2ca32f587213063dec5b61893a1939b7b4403502e\": container with ID starting with b1f5fba810ed191d0edf37d2ca32f587213063dec5b61893a1939b7b4403502e not found: ID does not exist" containerID="b1f5fba810ed191d0edf37d2ca32f587213063dec5b61893a1939b7b4403502e" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.337970 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1f5fba810ed191d0edf37d2ca32f587213063dec5b61893a1939b7b4403502e"} err="failed to get container status \"b1f5fba810ed191d0edf37d2ca32f587213063dec5b61893a1939b7b4403502e\": rpc error: code = NotFound desc = could not find container \"b1f5fba810ed191d0edf37d2ca32f587213063dec5b61893a1939b7b4403502e\": container with ID starting with b1f5fba810ed191d0edf37d2ca32f587213063dec5b61893a1939b7b4403502e not found: ID does not exist" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.406073 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "c41f19c8-fc0a-4228-95a6-315c311f612a" (UID: "c41f19c8-fc0a-4228-95a6-315c311f612a"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.431421 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-container-storage-run\") on node \"crc\" DevicePath \"\"" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.431489 4745 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/c41f19c8-fc0a-4228-95a6-315c311f612a-buildcachedir\") on node \"crc\" DevicePath \"\"" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.431502 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/c41f19c8-fc0a-4228-95a6-315c311f612a-builder-dockercfg-vzj4m-pull\") on node \"crc\" DevicePath \"\"" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.431514 4745 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-buildworkdir\") on node \"crc\" DevicePath \"\"" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.431529 4745 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-system-configs\") on node \"crc\" DevicePath \"\"" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.431540 4745 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/c41f19c8-fc0a-4228-95a6-315c311f612a-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.431552 4745 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.431563 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ldlp\" (UniqueName: \"kubernetes.io/projected/c41f19c8-fc0a-4228-95a6-315c311f612a-kube-api-access-9ldlp\") on node \"crc\" DevicePath \"\"" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.431575 4745 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-build-blob-cache\") on node \"crc\" DevicePath \"\"" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.431586 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/c41f19c8-fc0a-4228-95a6-315c311f612a-builder-dockercfg-vzj4m-push\") on node \"crc\" DevicePath \"\"" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.431598 4745 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c41f19c8-fc0a-4228-95a6-315c311f612a-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.690304 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "c41f19c8-fc0a-4228-95a6-315c311f612a" (UID: "c41f19c8-fc0a-4228-95a6-315c311f612a"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.736663 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/c41f19c8-fc0a-4228-95a6-315c311f612a-container-storage-root\") on node \"crc\" DevicePath \"\"" Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.950966 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Dec 08 00:29:03 crc kubenswrapper[4745]: I1208 00:29:03.958995 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.509157 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/sg-bridge-2-build"] Dec 08 00:29:04 crc kubenswrapper[4745]: E1208 00:29:04.509613 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c41f19c8-fc0a-4228-95a6-315c311f612a" containerName="manage-dockerfile" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.509658 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="c41f19c8-fc0a-4228-95a6-315c311f612a" containerName="manage-dockerfile" Dec 08 00:29:04 crc kubenswrapper[4745]: E1208 00:29:04.509685 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c41f19c8-fc0a-4228-95a6-315c311f612a" containerName="docker-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.509702 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="c41f19c8-fc0a-4228-95a6-315c311f612a" containerName="docker-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.509984 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="c41f19c8-fc0a-4228-95a6-315c311f612a" containerName="docker-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.511835 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.514099 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-2-sys-config" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.515032 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-vzj4m" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.517450 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-2-global-ca" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.518880 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-2-ca" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.550785 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-bridge-2-build"] Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.654796 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.654862 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-system-configs\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.654984 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-container-storage-root\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.655017 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/27629ad1-0e70-4203-b122-efbfe8e926cc-builder-dockercfg-vzj4m-push\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.655183 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dflhs\" (UniqueName: \"kubernetes.io/projected/27629ad1-0e70-4203-b122-efbfe8e926cc-kube-api-access-dflhs\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.655288 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-build-blob-cache\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.655359 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/27629ad1-0e70-4203-b122-efbfe8e926cc-builder-dockercfg-vzj4m-pull\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.655412 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-container-storage-run\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.655571 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/27629ad1-0e70-4203-b122-efbfe8e926cc-node-pullsecrets\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.655638 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/27629ad1-0e70-4203-b122-efbfe8e926cc-buildcachedir\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.655676 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-proxy-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.655771 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-buildworkdir\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.756789 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dflhs\" (UniqueName: \"kubernetes.io/projected/27629ad1-0e70-4203-b122-efbfe8e926cc-kube-api-access-dflhs\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.756866 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-build-blob-cache\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.756919 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/27629ad1-0e70-4203-b122-efbfe8e926cc-builder-dockercfg-vzj4m-pull\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.757009 4745 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-container-storage-run\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.757042 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/27629ad1-0e70-4203-b122-efbfe8e926cc-node-pullsecrets\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.757067 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/27629ad1-0e70-4203-b122-efbfe8e926cc-buildcachedir\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.757112 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-proxy-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.757147 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-buildworkdir\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.757208 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.757227 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/27629ad1-0e70-4203-b122-efbfe8e926cc-buildcachedir\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.757232 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-system-configs\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.757287 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/27629ad1-0e70-4203-b122-efbfe8e926cc-node-pullsecrets\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.757322 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-container-storage-root\") pod 
\"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.757397 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/27629ad1-0e70-4203-b122-efbfe8e926cc-builder-dockercfg-vzj4m-push\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.757817 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-container-storage-root\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.758203 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-buildworkdir\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.758222 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-system-configs\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.758530 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-proxy-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.758549 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-container-storage-run\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.759162 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-build-blob-cache\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.759712 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.763505 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/27629ad1-0e70-4203-b122-efbfe8e926cc-builder-dockercfg-vzj4m-pull\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 
crc kubenswrapper[4745]: I1208 00:29:04.773569 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/27629ad1-0e70-4203-b122-efbfe8e926cc-builder-dockercfg-vzj4m-push\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.782459 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dflhs\" (UniqueName: \"kubernetes.io/projected/27629ad1-0e70-4203-b122-efbfe8e926cc-kube-api-access-dflhs\") pod \"sg-bridge-2-build\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.837282 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-2-build" Dec 08 00:29:04 crc kubenswrapper[4745]: I1208 00:29:04.913959 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c41f19c8-fc0a-4228-95a6-315c311f612a" path="/var/lib/kubelet/pods/c41f19c8-fc0a-4228-95a6-315c311f612a/volumes" Dec 08 00:29:05 crc kubenswrapper[4745]: I1208 00:29:05.067657 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-bridge-2-build"] Dec 08 00:29:05 crc kubenswrapper[4745]: I1208 00:29:05.312883 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"27629ad1-0e70-4203-b122-efbfe8e926cc","Type":"ContainerStarted","Data":"f5aa1694bef56e69c2b0fa11eec0cbf74da3f8833ff280830a9e0e0d8bc1db6f"} Dec 08 00:29:06 crc kubenswrapper[4745]: I1208 00:29:06.324839 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"27629ad1-0e70-4203-b122-efbfe8e926cc","Type":"ContainerStarted","Data":"f803c7de0f474e76e8747d1eaaf8d5e63a6fad51d26bc6c387ccfa990ef31c64"} Dec 08 00:29:07 crc kubenswrapper[4745]: I1208 00:29:07.337074 4745 generic.go:334] "Generic (PLEG): container finished" podID="27629ad1-0e70-4203-b122-efbfe8e926cc" containerID="f803c7de0f474e76e8747d1eaaf8d5e63a6fad51d26bc6c387ccfa990ef31c64" exitCode=0 Dec 08 00:29:07 crc kubenswrapper[4745]: I1208 00:29:07.337229 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"27629ad1-0e70-4203-b122-efbfe8e926cc","Type":"ContainerDied","Data":"f803c7de0f474e76e8747d1eaaf8d5e63a6fad51d26bc6c387ccfa990ef31c64"} Dec 08 00:29:08 crc kubenswrapper[4745]: I1208 00:29:08.350696 4745 generic.go:334] "Generic (PLEG): container finished" podID="27629ad1-0e70-4203-b122-efbfe8e926cc" containerID="07fa823d90c9b136b5284f38e9b693ce3572088a1b3086a871c6974156066e84" exitCode=0 Dec 08 00:29:08 crc kubenswrapper[4745]: I1208 00:29:08.350764 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"27629ad1-0e70-4203-b122-efbfe8e926cc","Type":"ContainerDied","Data":"07fa823d90c9b136b5284f38e9b693ce3572088a1b3086a871c6974156066e84"} Dec 08 00:29:08 crc kubenswrapper[4745]: I1208 00:29:08.390340 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-2-build_27629ad1-0e70-4203-b122-efbfe8e926cc/manage-dockerfile/0.log" Dec 08 00:29:09 crc kubenswrapper[4745]: I1208 00:29:09.360848 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" 
event={"ID":"27629ad1-0e70-4203-b122-efbfe8e926cc","Type":"ContainerStarted","Data":"700078523d92ff287afb8b409dd6e7b8c96c38c265120384b6f63921254f1c67"} Dec 08 00:29:09 crc kubenswrapper[4745]: I1208 00:29:09.393495 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/sg-bridge-2-build" podStartSLOduration=5.393464866 podStartE2EDuration="5.393464866s" podCreationTimestamp="2025-12-08 00:29:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:29:09.385706668 +0000 UTC m=+1304.814913028" watchObservedRunningTime="2025-12-08 00:29:09.393464866 +0000 UTC m=+1304.822671206" Dec 08 00:29:22 crc kubenswrapper[4745]: I1208 00:29:22.460916 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:29:22 crc kubenswrapper[4745]: I1208 00:29:22.461295 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:29:30 crc kubenswrapper[4745]: I1208 00:29:30.643190 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mf268"] Dec 08 00:29:30 crc kubenswrapper[4745]: I1208 00:29:30.645679 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:30 crc kubenswrapper[4745]: I1208 00:29:30.657199 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mf268"] Dec 08 00:29:30 crc kubenswrapper[4745]: I1208 00:29:30.701449 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4205f65-d282-4f1c-892d-55abcf670857-utilities\") pod \"certified-operators-mf268\" (UID: \"a4205f65-d282-4f1c-892d-55abcf670857\") " pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:30 crc kubenswrapper[4745]: I1208 00:29:30.701503 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk6xb\" (UniqueName: \"kubernetes.io/projected/a4205f65-d282-4f1c-892d-55abcf670857-kube-api-access-zk6xb\") pod \"certified-operators-mf268\" (UID: \"a4205f65-d282-4f1c-892d-55abcf670857\") " pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:30 crc kubenswrapper[4745]: I1208 00:29:30.701576 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4205f65-d282-4f1c-892d-55abcf670857-catalog-content\") pod \"certified-operators-mf268\" (UID: \"a4205f65-d282-4f1c-892d-55abcf670857\") " pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:30 crc kubenswrapper[4745]: I1208 00:29:30.802804 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4205f65-d282-4f1c-892d-55abcf670857-utilities\") pod \"certified-operators-mf268\" (UID: 
\"a4205f65-d282-4f1c-892d-55abcf670857\") " pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:30 crc kubenswrapper[4745]: I1208 00:29:30.803150 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk6xb\" (UniqueName: \"kubernetes.io/projected/a4205f65-d282-4f1c-892d-55abcf670857-kube-api-access-zk6xb\") pod \"certified-operators-mf268\" (UID: \"a4205f65-d282-4f1c-892d-55abcf670857\") " pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:30 crc kubenswrapper[4745]: I1208 00:29:30.803197 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4205f65-d282-4f1c-892d-55abcf670857-catalog-content\") pod \"certified-operators-mf268\" (UID: \"a4205f65-d282-4f1c-892d-55abcf670857\") " pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:30 crc kubenswrapper[4745]: I1208 00:29:30.803381 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4205f65-d282-4f1c-892d-55abcf670857-utilities\") pod \"certified-operators-mf268\" (UID: \"a4205f65-d282-4f1c-892d-55abcf670857\") " pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:30 crc kubenswrapper[4745]: I1208 00:29:30.803578 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4205f65-d282-4f1c-892d-55abcf670857-catalog-content\") pod \"certified-operators-mf268\" (UID: \"a4205f65-d282-4f1c-892d-55abcf670857\") " pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:30 crc kubenswrapper[4745]: I1208 00:29:30.837005 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk6xb\" (UniqueName: \"kubernetes.io/projected/a4205f65-d282-4f1c-892d-55abcf670857-kube-api-access-zk6xb\") pod \"certified-operators-mf268\" (UID: \"a4205f65-d282-4f1c-892d-55abcf670857\") " pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:30 crc kubenswrapper[4745]: I1208 00:29:30.972719 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:31 crc kubenswrapper[4745]: I1208 00:29:31.272908 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mf268"] Dec 08 00:29:31 crc kubenswrapper[4745]: I1208 00:29:31.529051 4745 generic.go:334] "Generic (PLEG): container finished" podID="a4205f65-d282-4f1c-892d-55abcf670857" containerID="647379c2837cebb49a48923797641126fa8628e9efb4580495c12837bf12539f" exitCode=0 Dec 08 00:29:31 crc kubenswrapper[4745]: I1208 00:29:31.529253 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mf268" event={"ID":"a4205f65-d282-4f1c-892d-55abcf670857","Type":"ContainerDied","Data":"647379c2837cebb49a48923797641126fa8628e9efb4580495c12837bf12539f"} Dec 08 00:29:31 crc kubenswrapper[4745]: I1208 00:29:31.529398 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mf268" event={"ID":"a4205f65-d282-4f1c-892d-55abcf670857","Type":"ContainerStarted","Data":"fbebb555a5b6efbbeeb3301bf38b8aea2e6a8fea406329af89a07907b2ba3e21"} Dec 08 00:29:31 crc kubenswrapper[4745]: I1208 00:29:31.530911 4745 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 00:29:32 crc kubenswrapper[4745]: I1208 00:29:32.535960 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mf268" event={"ID":"a4205f65-d282-4f1c-892d-55abcf670857","Type":"ContainerStarted","Data":"edb3844e627b7fcf52b9710577652848ff0bd054dd12673d965a8faf0fdbd4ec"} Dec 08 00:29:33 crc kubenswrapper[4745]: I1208 00:29:33.545464 4745 generic.go:334] "Generic (PLEG): container finished" podID="a4205f65-d282-4f1c-892d-55abcf670857" containerID="edb3844e627b7fcf52b9710577652848ff0bd054dd12673d965a8faf0fdbd4ec" exitCode=0 Dec 08 00:29:33 crc kubenswrapper[4745]: I1208 00:29:33.545541 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mf268" event={"ID":"a4205f65-d282-4f1c-892d-55abcf670857","Type":"ContainerDied","Data":"edb3844e627b7fcf52b9710577652848ff0bd054dd12673d965a8faf0fdbd4ec"} Dec 08 00:29:34 crc kubenswrapper[4745]: I1208 00:29:34.553247 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mf268" event={"ID":"a4205f65-d282-4f1c-892d-55abcf670857","Type":"ContainerStarted","Data":"90bf60da2b346d7bc057a13459028b916a3156fee72125c260f1ca28daa07fd7"} Dec 08 00:29:34 crc kubenswrapper[4745]: I1208 00:29:34.579415 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mf268" podStartSLOduration=2.164807931 podStartE2EDuration="4.579386342s" podCreationTimestamp="2025-12-08 00:29:30 +0000 UTC" firstStartedPulling="2025-12-08 00:29:31.530639056 +0000 UTC m=+1326.959845356" lastFinishedPulling="2025-12-08 00:29:33.945217427 +0000 UTC m=+1329.374423767" observedRunningTime="2025-12-08 00:29:34.574383128 +0000 UTC m=+1330.003589438" watchObservedRunningTime="2025-12-08 00:29:34.579386342 +0000 UTC m=+1330.008592672" Dec 08 00:29:40 crc kubenswrapper[4745]: I1208 00:29:40.973385 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:40 crc kubenswrapper[4745]: I1208 00:29:40.973664 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:41 crc kubenswrapper[4745]: I1208 00:29:41.042102 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:41 crc kubenswrapper[4745]: I1208 00:29:41.660645 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:41 crc kubenswrapper[4745]: I1208 00:29:41.710734 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mf268"] Dec 08 00:29:43 crc kubenswrapper[4745]: I1208 00:29:43.627339 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mf268" podUID="a4205f65-d282-4f1c-892d-55abcf670857" containerName="registry-server" containerID="cri-o://90bf60da2b346d7bc057a13459028b916a3156fee72125c260f1ca28daa07fd7" gracePeriod=2 Dec 08 00:29:44 crc kubenswrapper[4745]: I1208 00:29:44.638378 4745 generic.go:334] "Generic (PLEG): container finished" podID="a4205f65-d282-4f1c-892d-55abcf670857" containerID="90bf60da2b346d7bc057a13459028b916a3156fee72125c260f1ca28daa07fd7" exitCode=0 Dec 08 00:29:44 crc kubenswrapper[4745]: I1208 00:29:44.638434 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mf268" event={"ID":"a4205f65-d282-4f1c-892d-55abcf670857","Type":"ContainerDied","Data":"90bf60da2b346d7bc057a13459028b916a3156fee72125c260f1ca28daa07fd7"} Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.242891 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.416514 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zk6xb\" (UniqueName: \"kubernetes.io/projected/a4205f65-d282-4f1c-892d-55abcf670857-kube-api-access-zk6xb\") pod \"a4205f65-d282-4f1c-892d-55abcf670857\" (UID: \"a4205f65-d282-4f1c-892d-55abcf670857\") " Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.416875 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4205f65-d282-4f1c-892d-55abcf670857-utilities\") pod \"a4205f65-d282-4f1c-892d-55abcf670857\" (UID: \"a4205f65-d282-4f1c-892d-55abcf670857\") " Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.417207 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4205f65-d282-4f1c-892d-55abcf670857-catalog-content\") pod \"a4205f65-d282-4f1c-892d-55abcf670857\" (UID: \"a4205f65-d282-4f1c-892d-55abcf670857\") " Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.418224 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4205f65-d282-4f1c-892d-55abcf670857-utilities" (OuterVolumeSpecName: "utilities") pod "a4205f65-d282-4f1c-892d-55abcf670857" (UID: "a4205f65-d282-4f1c-892d-55abcf670857"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.422784 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4205f65-d282-4f1c-892d-55abcf670857-kube-api-access-zk6xb" (OuterVolumeSpecName: "kube-api-access-zk6xb") pod "a4205f65-d282-4f1c-892d-55abcf670857" (UID: "a4205f65-d282-4f1c-892d-55abcf670857"). InnerVolumeSpecName "kube-api-access-zk6xb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.468375 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4205f65-d282-4f1c-892d-55abcf670857-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a4205f65-d282-4f1c-892d-55abcf670857" (UID: "a4205f65-d282-4f1c-892d-55abcf670857"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.518144 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zk6xb\" (UniqueName: \"kubernetes.io/projected/a4205f65-d282-4f1c-892d-55abcf670857-kube-api-access-zk6xb\") on node \"crc\" DevicePath \"\"" Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.518176 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4205f65-d282-4f1c-892d-55abcf670857-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.518189 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4205f65-d282-4f1c-892d-55abcf670857-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.654695 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mf268" event={"ID":"a4205f65-d282-4f1c-892d-55abcf670857","Type":"ContainerDied","Data":"fbebb555a5b6efbbeeb3301bf38b8aea2e6a8fea406329af89a07907b2ba3e21"} Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.654817 4745 scope.go:117] "RemoveContainer" containerID="90bf60da2b346d7bc057a13459028b916a3156fee72125c260f1ca28daa07fd7" Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.654953 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mf268" Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.696110 4745 scope.go:117] "RemoveContainer" containerID="edb3844e627b7fcf52b9710577652848ff0bd054dd12673d965a8faf0fdbd4ec" Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.696702 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mf268"] Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.701750 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mf268"] Dec 08 00:29:45 crc kubenswrapper[4745]: I1208 00:29:45.718596 4745 scope.go:117] "RemoveContainer" containerID="647379c2837cebb49a48923797641126fa8628e9efb4580495c12837bf12539f" Dec 08 00:29:46 crc kubenswrapper[4745]: I1208 00:29:46.893509 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4205f65-d282-4f1c-892d-55abcf670857" path="/var/lib/kubelet/pods/a4205f65-d282-4f1c-892d-55abcf670857/volumes" Dec 08 00:29:52 crc kubenswrapper[4745]: I1208 00:29:52.460749 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:29:52 crc kubenswrapper[4745]: I1208 00:29:52.461406 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:29:58 crc kubenswrapper[4745]: I1208 00:29:58.749021 4745 generic.go:334] "Generic (PLEG): container finished" podID="27629ad1-0e70-4203-b122-efbfe8e926cc" containerID="700078523d92ff287afb8b409dd6e7b8c96c38c265120384b6f63921254f1c67" exitCode=0 Dec 08 00:29:58 crc kubenswrapper[4745]: I1208 00:29:58.749156 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"27629ad1-0e70-4203-b122-efbfe8e926cc","Type":"ContainerDied","Data":"700078523d92ff287afb8b409dd6e7b8c96c38c265120384b6f63921254f1c67"} Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.066045 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-bridge-2-build" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.141504 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc"] Dec 08 00:30:00 crc kubenswrapper[4745]: E1208 00:30:00.141796 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27629ad1-0e70-4203-b122-efbfe8e926cc" containerName="manage-dockerfile" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.141813 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="27629ad1-0e70-4203-b122-efbfe8e926cc" containerName="manage-dockerfile" Dec 08 00:30:00 crc kubenswrapper[4745]: E1208 00:30:00.141823 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27629ad1-0e70-4203-b122-efbfe8e926cc" containerName="git-clone" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.141830 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="27629ad1-0e70-4203-b122-efbfe8e926cc" containerName="git-clone" Dec 08 00:30:00 crc kubenswrapper[4745]: E1208 00:30:00.141844 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27629ad1-0e70-4203-b122-efbfe8e926cc" containerName="docker-build" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.141854 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="27629ad1-0e70-4203-b122-efbfe8e926cc" containerName="docker-build" Dec 08 00:30:00 crc kubenswrapper[4745]: E1208 00:30:00.141861 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4205f65-d282-4f1c-892d-55abcf670857" containerName="registry-server" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.141867 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4205f65-d282-4f1c-892d-55abcf670857" containerName="registry-server" Dec 08 00:30:00 crc kubenswrapper[4745]: E1208 00:30:00.141879 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4205f65-d282-4f1c-892d-55abcf670857" containerName="extract-content" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.141885 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4205f65-d282-4f1c-892d-55abcf670857" containerName="extract-content" Dec 08 00:30:00 crc kubenswrapper[4745]: E1208 00:30:00.141895 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4205f65-d282-4f1c-892d-55abcf670857" containerName="extract-utilities" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.141901 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4205f65-d282-4f1c-892d-55abcf670857" containerName="extract-utilities" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.142043 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="27629ad1-0e70-4203-b122-efbfe8e926cc" containerName="docker-build" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.142057 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4205f65-d282-4f1c-892d-55abcf670857" containerName="registry-server" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.142512 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.144676 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.144886 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.147334 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc"] Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.148986 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/27629ad1-0e70-4203-b122-efbfe8e926cc-buildcachedir\") pod \"27629ad1-0e70-4203-b122-efbfe8e926cc\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.149028 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-build-blob-cache\") pod \"27629ad1-0e70-4203-b122-efbfe8e926cc\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.149051 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/27629ad1-0e70-4203-b122-efbfe8e926cc-node-pullsecrets\") pod \"27629ad1-0e70-4203-b122-efbfe8e926cc\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.149076 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-proxy-ca-bundles\") pod \"27629ad1-0e70-4203-b122-efbfe8e926cc\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.149077 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/27629ad1-0e70-4203-b122-efbfe8e926cc-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "27629ad1-0e70-4203-b122-efbfe8e926cc" (UID: "27629ad1-0e70-4203-b122-efbfe8e926cc"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.149151 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-system-configs\") pod \"27629ad1-0e70-4203-b122-efbfe8e926cc\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.149167 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/27629ad1-0e70-4203-b122-efbfe8e926cc-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "27629ad1-0e70-4203-b122-efbfe8e926cc" (UID: "27629ad1-0e70-4203-b122-efbfe8e926cc"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.149230 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dflhs\" (UniqueName: \"kubernetes.io/projected/27629ad1-0e70-4203-b122-efbfe8e926cc-kube-api-access-dflhs\") pod \"27629ad1-0e70-4203-b122-efbfe8e926cc\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.149274 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-container-storage-run\") pod \"27629ad1-0e70-4203-b122-efbfe8e926cc\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.149328 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/27629ad1-0e70-4203-b122-efbfe8e926cc-builder-dockercfg-vzj4m-pull\") pod \"27629ad1-0e70-4203-b122-efbfe8e926cc\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.149358 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-ca-bundles\") pod \"27629ad1-0e70-4203-b122-efbfe8e926cc\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.149413 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-buildworkdir\") pod \"27629ad1-0e70-4203-b122-efbfe8e926cc\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.149492 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-container-storage-root\") pod \"27629ad1-0e70-4203-b122-efbfe8e926cc\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.149618 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/27629ad1-0e70-4203-b122-efbfe8e926cc-builder-dockercfg-vzj4m-push\") pod \"27629ad1-0e70-4203-b122-efbfe8e926cc\" (UID: \"27629ad1-0e70-4203-b122-efbfe8e926cc\") " Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.149879 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "27629ad1-0e70-4203-b122-efbfe8e926cc" (UID: "27629ad1-0e70-4203-b122-efbfe8e926cc"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.150560 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "27629ad1-0e70-4203-b122-efbfe8e926cc" (UID: "27629ad1-0e70-4203-b122-efbfe8e926cc"). InnerVolumeSpecName "container-storage-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.150565 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "27629ad1-0e70-4203-b122-efbfe8e926cc" (UID: "27629ad1-0e70-4203-b122-efbfe8e926cc"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.150614 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "27629ad1-0e70-4203-b122-efbfe8e926cc" (UID: "27629ad1-0e70-4203-b122-efbfe8e926cc"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.151185 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "27629ad1-0e70-4203-b122-efbfe8e926cc" (UID: "27629ad1-0e70-4203-b122-efbfe8e926cc"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.154555 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27629ad1-0e70-4203-b122-efbfe8e926cc-builder-dockercfg-vzj4m-pull" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-pull") pod "27629ad1-0e70-4203-b122-efbfe8e926cc" (UID: "27629ad1-0e70-4203-b122-efbfe8e926cc"). InnerVolumeSpecName "builder-dockercfg-vzj4m-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.154664 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27629ad1-0e70-4203-b122-efbfe8e926cc-builder-dockercfg-vzj4m-push" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-push") pod "27629ad1-0e70-4203-b122-efbfe8e926cc" (UID: "27629ad1-0e70-4203-b122-efbfe8e926cc"). InnerVolumeSpecName "builder-dockercfg-vzj4m-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.155697 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27629ad1-0e70-4203-b122-efbfe8e926cc-kube-api-access-dflhs" (OuterVolumeSpecName: "kube-api-access-dflhs") pod "27629ad1-0e70-4203-b122-efbfe8e926cc" (UID: "27629ad1-0e70-4203-b122-efbfe8e926cc"). InnerVolumeSpecName "kube-api-access-dflhs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.156507 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-container-storage-run\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.156550 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/27629ad1-0e70-4203-b122-efbfe8e926cc-builder-dockercfg-vzj4m-pull\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.156565 4745 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.156576 4745 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-buildworkdir\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.156587 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/27629ad1-0e70-4203-b122-efbfe8e926cc-builder-dockercfg-vzj4m-push\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.156600 4745 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/27629ad1-0e70-4203-b122-efbfe8e926cc-buildcachedir\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.156611 4745 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/27629ad1-0e70-4203-b122-efbfe8e926cc-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.156623 4745 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.156643 4745 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/27629ad1-0e70-4203-b122-efbfe8e926cc-build-system-configs\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.156653 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dflhs\" (UniqueName: \"kubernetes.io/projected/27629ad1-0e70-4203-b122-efbfe8e926cc-kube-api-access-dflhs\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.257901 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ef7a3710-8127-4293-8a28-8cee6efe122a-secret-volume\") pod \"collect-profiles-29419230-ft9fc\" (UID: \"ef7a3710-8127-4293-8a28-8cee6efe122a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.258056 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ef7a3710-8127-4293-8a28-8cee6efe122a-config-volume\") 
pod \"collect-profiles-29419230-ft9fc\" (UID: \"ef7a3710-8127-4293-8a28-8cee6efe122a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.258192 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nq7f4\" (UniqueName: \"kubernetes.io/projected/ef7a3710-8127-4293-8a28-8cee6efe122a-kube-api-access-nq7f4\") pod \"collect-profiles-29419230-ft9fc\" (UID: \"ef7a3710-8127-4293-8a28-8cee6efe122a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.272517 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "27629ad1-0e70-4203-b122-efbfe8e926cc" (UID: "27629ad1-0e70-4203-b122-efbfe8e926cc"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.361021 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nq7f4\" (UniqueName: \"kubernetes.io/projected/ef7a3710-8127-4293-8a28-8cee6efe122a-kube-api-access-nq7f4\") pod \"collect-profiles-29419230-ft9fc\" (UID: \"ef7a3710-8127-4293-8a28-8cee6efe122a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.361589 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ef7a3710-8127-4293-8a28-8cee6efe122a-secret-volume\") pod \"collect-profiles-29419230-ft9fc\" (UID: \"ef7a3710-8127-4293-8a28-8cee6efe122a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.362006 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ef7a3710-8127-4293-8a28-8cee6efe122a-config-volume\") pod \"collect-profiles-29419230-ft9fc\" (UID: \"ef7a3710-8127-4293-8a28-8cee6efe122a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.362303 4745 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-build-blob-cache\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.364648 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ef7a3710-8127-4293-8a28-8cee6efe122a-config-volume\") pod \"collect-profiles-29419230-ft9fc\" (UID: \"ef7a3710-8127-4293-8a28-8cee6efe122a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.370368 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ef7a3710-8127-4293-8a28-8cee6efe122a-secret-volume\") pod \"collect-profiles-29419230-ft9fc\" (UID: \"ef7a3710-8127-4293-8a28-8cee6efe122a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.391174 4745 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-nq7f4\" (UniqueName: \"kubernetes.io/projected/ef7a3710-8127-4293-8a28-8cee6efe122a-kube-api-access-nq7f4\") pod \"collect-profiles-29419230-ft9fc\" (UID: \"ef7a3710-8127-4293-8a28-8cee6efe122a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.493370 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.749824 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc"] Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.765125 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc" event={"ID":"ef7a3710-8127-4293-8a28-8cee6efe122a","Type":"ContainerStarted","Data":"9615050732a643b7cd3de89b9cd200a6dc51317e0a26b9a57a23a998563a62b8"} Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.769876 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"27629ad1-0e70-4203-b122-efbfe8e926cc","Type":"ContainerDied","Data":"f5aa1694bef56e69c2b0fa11eec0cbf74da3f8833ff280830a9e0e0d8bc1db6f"} Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.769914 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5aa1694bef56e69c2b0fa11eec0cbf74da3f8833ff280830a9e0e0d8bc1db6f" Dec 08 00:30:00 crc kubenswrapper[4745]: I1208 00:30:00.770011 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-2-build" Dec 08 00:30:01 crc kubenswrapper[4745]: I1208 00:30:01.307046 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "27629ad1-0e70-4203-b122-efbfe8e926cc" (UID: "27629ad1-0e70-4203-b122-efbfe8e926cc"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:30:01 crc kubenswrapper[4745]: I1208 00:30:01.375570 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/27629ad1-0e70-4203-b122-efbfe8e926cc-container-storage-root\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:01 crc kubenswrapper[4745]: I1208 00:30:01.779253 4745 generic.go:334] "Generic (PLEG): container finished" podID="ef7a3710-8127-4293-8a28-8cee6efe122a" containerID="892b50b52df71a4a344ee59e9f54557fa63fa46e100f15df3dbf0a32315addc3" exitCode=0 Dec 08 00:30:01 crc kubenswrapper[4745]: I1208 00:30:01.779316 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc" event={"ID":"ef7a3710-8127-4293-8a28-8cee6efe122a","Type":"ContainerDied","Data":"892b50b52df71a4a344ee59e9f54557fa63fa46e100f15df3dbf0a32315addc3"} Dec 08 00:30:03 crc kubenswrapper[4745]: I1208 00:30:03.117866 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc" Dec 08 00:30:03 crc kubenswrapper[4745]: I1208 00:30:03.201056 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ef7a3710-8127-4293-8a28-8cee6efe122a-secret-volume\") pod \"ef7a3710-8127-4293-8a28-8cee6efe122a\" (UID: \"ef7a3710-8127-4293-8a28-8cee6efe122a\") " Dec 08 00:30:03 crc kubenswrapper[4745]: I1208 00:30:03.201213 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ef7a3710-8127-4293-8a28-8cee6efe122a-config-volume\") pod \"ef7a3710-8127-4293-8a28-8cee6efe122a\" (UID: \"ef7a3710-8127-4293-8a28-8cee6efe122a\") " Dec 08 00:30:03 crc kubenswrapper[4745]: I1208 00:30:03.201261 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nq7f4\" (UniqueName: \"kubernetes.io/projected/ef7a3710-8127-4293-8a28-8cee6efe122a-kube-api-access-nq7f4\") pod \"ef7a3710-8127-4293-8a28-8cee6efe122a\" (UID: \"ef7a3710-8127-4293-8a28-8cee6efe122a\") " Dec 08 00:30:03 crc kubenswrapper[4745]: I1208 00:30:03.201915 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef7a3710-8127-4293-8a28-8cee6efe122a-config-volume" (OuterVolumeSpecName: "config-volume") pod "ef7a3710-8127-4293-8a28-8cee6efe122a" (UID: "ef7a3710-8127-4293-8a28-8cee6efe122a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:30:03 crc kubenswrapper[4745]: I1208 00:30:03.207243 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef7a3710-8127-4293-8a28-8cee6efe122a-kube-api-access-nq7f4" (OuterVolumeSpecName: "kube-api-access-nq7f4") pod "ef7a3710-8127-4293-8a28-8cee6efe122a" (UID: "ef7a3710-8127-4293-8a28-8cee6efe122a"). InnerVolumeSpecName "kube-api-access-nq7f4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:30:03 crc kubenswrapper[4745]: I1208 00:30:03.207256 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef7a3710-8127-4293-8a28-8cee6efe122a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ef7a3710-8127-4293-8a28-8cee6efe122a" (UID: "ef7a3710-8127-4293-8a28-8cee6efe122a"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:30:03 crc kubenswrapper[4745]: I1208 00:30:03.302917 4745 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ef7a3710-8127-4293-8a28-8cee6efe122a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:03 crc kubenswrapper[4745]: I1208 00:30:03.302964 4745 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ef7a3710-8127-4293-8a28-8cee6efe122a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:03 crc kubenswrapper[4745]: I1208 00:30:03.302974 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nq7f4\" (UniqueName: \"kubernetes.io/projected/ef7a3710-8127-4293-8a28-8cee6efe122a-kube-api-access-nq7f4\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:03 crc kubenswrapper[4745]: I1208 00:30:03.794700 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc" event={"ID":"ef7a3710-8127-4293-8a28-8cee6efe122a","Type":"ContainerDied","Data":"9615050732a643b7cd3de89b9cd200a6dc51317e0a26b9a57a23a998563a62b8"} Dec 08 00:30:03 crc kubenswrapper[4745]: I1208 00:30:03.795010 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9615050732a643b7cd3de89b9cd200a6dc51317e0a26b9a57a23a998563a62b8" Dec 08 00:30:03 crc kubenswrapper[4745]: I1208 00:30:03.794805 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29419230-ft9fc" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.238423 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Dec 08 00:30:05 crc kubenswrapper[4745]: E1208 00:30:05.238670 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef7a3710-8127-4293-8a28-8cee6efe122a" containerName="collect-profiles" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.238684 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef7a3710-8127-4293-8a28-8cee6efe122a" containerName="collect-profiles" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.238830 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef7a3710-8127-4293-8a28-8cee6efe122a" containerName="collect-profiles" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.239540 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.242390 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-1-ca" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.243292 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-1-sys-config" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.244659 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-1-global-ca" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.247258 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-vzj4m" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.267367 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.332263 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.332314 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-buildworkdir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.332352 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-build-blob-cache\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.332381 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-container-storage-run\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.332402 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkgm5\" (UniqueName: \"kubernetes.io/projected/cf85e6c3-1ea1-401a-a633-129491e82b45-kube-api-access-jkgm5\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.332425 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-system-configs\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " 
pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.332557 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/cf85e6c3-1ea1-401a-a633-129491e82b45-builder-dockercfg-vzj4m-pull\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.332619 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/cf85e6c3-1ea1-401a-a633-129491e82b45-node-pullsecrets\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.332658 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/cf85e6c3-1ea1-401a-a633-129491e82b45-builder-dockercfg-vzj4m-push\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.332678 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.332694 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/cf85e6c3-1ea1-401a-a633-129491e82b45-buildcachedir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.332709 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-container-storage-root\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.433719 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-build-blob-cache\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.433842 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-container-storage-run\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 
00:30:05.433906 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkgm5\" (UniqueName: \"kubernetes.io/projected/cf85e6c3-1ea1-401a-a633-129491e82b45-kube-api-access-jkgm5\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.434007 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-system-configs\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.434060 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/cf85e6c3-1ea1-401a-a633-129491e82b45-builder-dockercfg-vzj4m-pull\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.434167 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/cf85e6c3-1ea1-401a-a633-129491e82b45-node-pullsecrets\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.434233 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/cf85e6c3-1ea1-401a-a633-129491e82b45-builder-dockercfg-vzj4m-push\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.434287 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.434331 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/cf85e6c3-1ea1-401a-a633-129491e82b45-buildcachedir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.434380 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-container-storage-root\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.434459 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-ca-bundles\") pod 
\"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.434497 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-build-blob-cache\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.434509 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/cf85e6c3-1ea1-401a-a633-129491e82b45-node-pullsecrets\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.434511 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-buildworkdir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.434741 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/cf85e6c3-1ea1-401a-a633-129491e82b45-buildcachedir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.435152 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-buildworkdir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.435458 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-system-configs\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.435645 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-container-storage-root\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.434465 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-container-storage-run\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.437001 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.437168 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.440600 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/cf85e6c3-1ea1-401a-a633-129491e82b45-builder-dockercfg-vzj4m-push\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.440837 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/cf85e6c3-1ea1-401a-a633-129491e82b45-builder-dockercfg-vzj4m-pull\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.455095 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkgm5\" (UniqueName: \"kubernetes.io/projected/cf85e6c3-1ea1-401a-a633-129491e82b45-kube-api-access-jkgm5\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.564615 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:05 crc kubenswrapper[4745]: I1208 00:30:05.876359 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Dec 08 00:30:06 crc kubenswrapper[4745]: I1208 00:30:06.824544 4745 generic.go:334] "Generic (PLEG): container finished" podID="cf85e6c3-1ea1-401a-a633-129491e82b45" containerID="2cf67aeb4fc68d010e7c20138eb98b49c58dcc9ab364d5af0678b1b13943604a" exitCode=0 Dec 08 00:30:06 crc kubenswrapper[4745]: I1208 00:30:06.824629 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"cf85e6c3-1ea1-401a-a633-129491e82b45","Type":"ContainerDied","Data":"2cf67aeb4fc68d010e7c20138eb98b49c58dcc9ab364d5af0678b1b13943604a"} Dec 08 00:30:06 crc kubenswrapper[4745]: I1208 00:30:06.824718 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"cf85e6c3-1ea1-401a-a633-129491e82b45","Type":"ContainerStarted","Data":"8c5232a53d948ea99683b18487fe768849ae6dbbefd6478f93a5c669b78de65d"} Dec 08 00:30:07 crc kubenswrapper[4745]: I1208 00:30:07.837523 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"cf85e6c3-1ea1-401a-a633-129491e82b45","Type":"ContainerStarted","Data":"211d1e50d86a996bdfec74a91c56e083deee6a38722fc1ed2f6912626e90d924"} Dec 08 00:30:07 crc kubenswrapper[4745]: I1208 00:30:07.878456 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-webhook-snmp-1-build" podStartSLOduration=2.878429611 podStartE2EDuration="2.878429611s" podCreationTimestamp="2025-12-08 00:30:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:30:07.87315458 +0000 UTC m=+1363.302360910" watchObservedRunningTime="2025-12-08 00:30:07.878429611 +0000 UTC m=+1363.307635951" Dec 08 00:30:10 crc kubenswrapper[4745]: I1208 00:30:10.449257 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-p88z2"] Dec 08 00:30:10 crc kubenswrapper[4745]: I1208 00:30:10.452108 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:10 crc kubenswrapper[4745]: I1208 00:30:10.468125 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p88z2"] Dec 08 00:30:10 crc kubenswrapper[4745]: I1208 00:30:10.508772 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5340212b-fd78-48fd-87a9-6c141fa5e516-catalog-content\") pod \"redhat-operators-p88z2\" (UID: \"5340212b-fd78-48fd-87a9-6c141fa5e516\") " pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:10 crc kubenswrapper[4745]: I1208 00:30:10.508863 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnnxj\" (UniqueName: \"kubernetes.io/projected/5340212b-fd78-48fd-87a9-6c141fa5e516-kube-api-access-fnnxj\") pod \"redhat-operators-p88z2\" (UID: \"5340212b-fd78-48fd-87a9-6c141fa5e516\") " pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:10 crc kubenswrapper[4745]: I1208 00:30:10.509091 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5340212b-fd78-48fd-87a9-6c141fa5e516-utilities\") pod \"redhat-operators-p88z2\" (UID: \"5340212b-fd78-48fd-87a9-6c141fa5e516\") " pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:10 crc kubenswrapper[4745]: I1208 00:30:10.610528 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5340212b-fd78-48fd-87a9-6c141fa5e516-utilities\") pod \"redhat-operators-p88z2\" (UID: \"5340212b-fd78-48fd-87a9-6c141fa5e516\") " pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:10 crc kubenswrapper[4745]: I1208 00:30:10.610683 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5340212b-fd78-48fd-87a9-6c141fa5e516-catalog-content\") pod \"redhat-operators-p88z2\" (UID: \"5340212b-fd78-48fd-87a9-6c141fa5e516\") " pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:10 crc kubenswrapper[4745]: I1208 00:30:10.610744 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnnxj\" (UniqueName: \"kubernetes.io/projected/5340212b-fd78-48fd-87a9-6c141fa5e516-kube-api-access-fnnxj\") pod \"redhat-operators-p88z2\" (UID: \"5340212b-fd78-48fd-87a9-6c141fa5e516\") " pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:10 crc kubenswrapper[4745]: I1208 00:30:10.611874 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5340212b-fd78-48fd-87a9-6c141fa5e516-utilities\") pod \"redhat-operators-p88z2\" (UID: \"5340212b-fd78-48fd-87a9-6c141fa5e516\") " pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:10 crc kubenswrapper[4745]: I1208 00:30:10.612006 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5340212b-fd78-48fd-87a9-6c141fa5e516-catalog-content\") pod \"redhat-operators-p88z2\" (UID: \"5340212b-fd78-48fd-87a9-6c141fa5e516\") " pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:10 crc kubenswrapper[4745]: I1208 00:30:10.653978 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-fnnxj\" (UniqueName: \"kubernetes.io/projected/5340212b-fd78-48fd-87a9-6c141fa5e516-kube-api-access-fnnxj\") pod \"redhat-operators-p88z2\" (UID: \"5340212b-fd78-48fd-87a9-6c141fa5e516\") " pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:10 crc kubenswrapper[4745]: I1208 00:30:10.817245 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:11 crc kubenswrapper[4745]: I1208 00:30:11.038558 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p88z2"] Dec 08 00:30:11 crc kubenswrapper[4745]: W1208 00:30:11.050109 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5340212b_fd78_48fd_87a9_6c141fa5e516.slice/crio-674360d9c2605f482f8175cfc95434b8b012fd962391035c3ed10607fb3a3edd WatchSource:0}: Error finding container 674360d9c2605f482f8175cfc95434b8b012fd962391035c3ed10607fb3a3edd: Status 404 returned error can't find the container with id 674360d9c2605f482f8175cfc95434b8b012fd962391035c3ed10607fb3a3edd Dec 08 00:30:11 crc kubenswrapper[4745]: I1208 00:30:11.869172 4745 generic.go:334] "Generic (PLEG): container finished" podID="5340212b-fd78-48fd-87a9-6c141fa5e516" containerID="94aae01098fb0a0c03f57a14e51a7b5c9d7dae7777a6b8a34728399d440e0d1c" exitCode=0 Dec 08 00:30:11 crc kubenswrapper[4745]: I1208 00:30:11.869273 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p88z2" event={"ID":"5340212b-fd78-48fd-87a9-6c141fa5e516","Type":"ContainerDied","Data":"94aae01098fb0a0c03f57a14e51a7b5c9d7dae7777a6b8a34728399d440e0d1c"} Dec 08 00:30:11 crc kubenswrapper[4745]: I1208 00:30:11.869561 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p88z2" event={"ID":"5340212b-fd78-48fd-87a9-6c141fa5e516","Type":"ContainerStarted","Data":"674360d9c2605f482f8175cfc95434b8b012fd962391035c3ed10607fb3a3edd"} Dec 08 00:30:13 crc kubenswrapper[4745]: I1208 00:30:13.889996 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p88z2" event={"ID":"5340212b-fd78-48fd-87a9-6c141fa5e516","Type":"ContainerStarted","Data":"078639f651a7137c18f06be20bddd2501dbcc1bb2f366bb6afa474ce97d20a56"} Dec 08 00:30:14 crc kubenswrapper[4745]: I1208 00:30:14.899359 4745 generic.go:334] "Generic (PLEG): container finished" podID="5340212b-fd78-48fd-87a9-6c141fa5e516" containerID="078639f651a7137c18f06be20bddd2501dbcc1bb2f366bb6afa474ce97d20a56" exitCode=0 Dec 08 00:30:14 crc kubenswrapper[4745]: I1208 00:30:14.899407 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p88z2" event={"ID":"5340212b-fd78-48fd-87a9-6c141fa5e516","Type":"ContainerDied","Data":"078639f651a7137c18f06be20bddd2501dbcc1bb2f366bb6afa474ce97d20a56"} Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.515747 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.516465 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/prometheus-webhook-snmp-1-build" podUID="cf85e6c3-1ea1-401a-a633-129491e82b45" containerName="docker-build" containerID="cri-o://211d1e50d86a996bdfec74a91c56e083deee6a38722fc1ed2f6912626e90d924" gracePeriod=30 Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 
00:30:15.905502 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p88z2" event={"ID":"5340212b-fd78-48fd-87a9-6c141fa5e516","Type":"ContainerStarted","Data":"4c43b75f2755d00016ea7edf3affb008106dc64bab0f50badcb2867f7110860b"} Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.906865 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-webhook-snmp-1-build_cf85e6c3-1ea1-401a-a633-129491e82b45/docker-build/0.log" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.907150 4745 generic.go:334] "Generic (PLEG): container finished" podID="cf85e6c3-1ea1-401a-a633-129491e82b45" containerID="211d1e50d86a996bdfec74a91c56e083deee6a38722fc1ed2f6912626e90d924" exitCode=1 Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.907179 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"cf85e6c3-1ea1-401a-a633-129491e82b45","Type":"ContainerDied","Data":"211d1e50d86a996bdfec74a91c56e083deee6a38722fc1ed2f6912626e90d924"} Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.907210 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"cf85e6c3-1ea1-401a-a633-129491e82b45","Type":"ContainerDied","Data":"8c5232a53d948ea99683b18487fe768849ae6dbbefd6478f93a5c669b78de65d"} Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.907224 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c5232a53d948ea99683b18487fe768849ae6dbbefd6478f93a5c669b78de65d" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.923161 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-p88z2" podStartSLOduration=2.4749994969999998 podStartE2EDuration="5.923145557s" podCreationTimestamp="2025-12-08 00:30:10 +0000 UTC" firstStartedPulling="2025-12-08 00:30:11.872283223 +0000 UTC m=+1367.301489523" lastFinishedPulling="2025-12-08 00:30:15.320429273 +0000 UTC m=+1370.749635583" observedRunningTime="2025-12-08 00:30:15.920504216 +0000 UTC m=+1371.349710526" watchObservedRunningTime="2025-12-08 00:30:15.923145557 +0000 UTC m=+1371.352351847" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.941343 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-webhook-snmp-1-build_cf85e6c3-1ea1-401a-a633-129491e82b45/docker-build/0.log" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.941626 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.984473 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkgm5\" (UniqueName: \"kubernetes.io/projected/cf85e6c3-1ea1-401a-a633-129491e82b45-kube-api-access-jkgm5\") pod \"cf85e6c3-1ea1-401a-a633-129491e82b45\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.984520 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-proxy-ca-bundles\") pod \"cf85e6c3-1ea1-401a-a633-129491e82b45\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.984549 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-container-storage-run\") pod \"cf85e6c3-1ea1-401a-a633-129491e82b45\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.984604 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-ca-bundles\") pod \"cf85e6c3-1ea1-401a-a633-129491e82b45\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.985373 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "cf85e6c3-1ea1-401a-a633-129491e82b45" (UID: "cf85e6c3-1ea1-401a-a633-129491e82b45"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.985591 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-build-blob-cache\") pod \"cf85e6c3-1ea1-401a-a633-129491e82b45\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.985683 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/cf85e6c3-1ea1-401a-a633-129491e82b45-builder-dockercfg-vzj4m-push\") pod \"cf85e6c3-1ea1-401a-a633-129491e82b45\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.985724 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-system-configs\") pod \"cf85e6c3-1ea1-401a-a633-129491e82b45\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.985422 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "cf85e6c3-1ea1-401a-a633-129491e82b45" (UID: "cf85e6c3-1ea1-401a-a633-129491e82b45"). InnerVolumeSpecName "build-proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.985745 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/cf85e6c3-1ea1-401a-a633-129491e82b45-buildcachedir\") pod \"cf85e6c3-1ea1-401a-a633-129491e82b45\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.985528 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "cf85e6c3-1ea1-401a-a633-129491e82b45" (UID: "cf85e6c3-1ea1-401a-a633-129491e82b45"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.985768 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-container-storage-root\") pod \"cf85e6c3-1ea1-401a-a633-129491e82b45\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.985816 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/cf85e6c3-1ea1-401a-a633-129491e82b45-node-pullsecrets\") pod \"cf85e6c3-1ea1-401a-a633-129491e82b45\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.985837 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-buildworkdir\") pod \"cf85e6c3-1ea1-401a-a633-129491e82b45\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.985841 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cf85e6c3-1ea1-401a-a633-129491e82b45-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "cf85e6c3-1ea1-401a-a633-129491e82b45" (UID: "cf85e6c3-1ea1-401a-a633-129491e82b45"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.985865 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/cf85e6c3-1ea1-401a-a633-129491e82b45-builder-dockercfg-vzj4m-pull\") pod \"cf85e6c3-1ea1-401a-a633-129491e82b45\" (UID: \"cf85e6c3-1ea1-401a-a633-129491e82b45\") " Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.985884 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cf85e6c3-1ea1-401a-a633-129491e82b45-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "cf85e6c3-1ea1-401a-a633-129491e82b45" (UID: "cf85e6c3-1ea1-401a-a633-129491e82b45"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.986196 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "cf85e6c3-1ea1-401a-a633-129491e82b45" (UID: "cf85e6c3-1ea1-401a-a633-129491e82b45"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.986421 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "cf85e6c3-1ea1-401a-a633-129491e82b45" (UID: "cf85e6c3-1ea1-401a-a633-129491e82b45"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.986977 4745 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-buildworkdir\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.987005 4745 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.987054 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-container-storage-run\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.987066 4745 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.987082 4745 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/cf85e6c3-1ea1-401a-a633-129491e82b45-build-system-configs\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.987124 4745 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/cf85e6c3-1ea1-401a-a633-129491e82b45-buildcachedir\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.987138 4745 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/cf85e6c3-1ea1-401a-a633-129491e82b45-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.989840 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf85e6c3-1ea1-401a-a633-129491e82b45-kube-api-access-jkgm5" (OuterVolumeSpecName: "kube-api-access-jkgm5") pod "cf85e6c3-1ea1-401a-a633-129491e82b45" (UID: "cf85e6c3-1ea1-401a-a633-129491e82b45"). InnerVolumeSpecName "kube-api-access-jkgm5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.990411 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf85e6c3-1ea1-401a-a633-129491e82b45-builder-dockercfg-vzj4m-push" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-push") pod "cf85e6c3-1ea1-401a-a633-129491e82b45" (UID: "cf85e6c3-1ea1-401a-a633-129491e82b45"). InnerVolumeSpecName "builder-dockercfg-vzj4m-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:30:15 crc kubenswrapper[4745]: I1208 00:30:15.990743 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf85e6c3-1ea1-401a-a633-129491e82b45-builder-dockercfg-vzj4m-pull" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-pull") pod "cf85e6c3-1ea1-401a-a633-129491e82b45" (UID: "cf85e6c3-1ea1-401a-a633-129491e82b45"). InnerVolumeSpecName "builder-dockercfg-vzj4m-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:30:16 crc kubenswrapper[4745]: I1208 00:30:16.033466 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "cf85e6c3-1ea1-401a-a633-129491e82b45" (UID: "cf85e6c3-1ea1-401a-a633-129491e82b45"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:30:16 crc kubenswrapper[4745]: I1208 00:30:16.088365 4745 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-build-blob-cache\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:16 crc kubenswrapper[4745]: I1208 00:30:16.088402 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/cf85e6c3-1ea1-401a-a633-129491e82b45-builder-dockercfg-vzj4m-push\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:16 crc kubenswrapper[4745]: I1208 00:30:16.088421 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/cf85e6c3-1ea1-401a-a633-129491e82b45-builder-dockercfg-vzj4m-pull\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:16 crc kubenswrapper[4745]: I1208 00:30:16.088436 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkgm5\" (UniqueName: \"kubernetes.io/projected/cf85e6c3-1ea1-401a-a633-129491e82b45-kube-api-access-jkgm5\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:16 crc kubenswrapper[4745]: I1208 00:30:16.314949 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "cf85e6c3-1ea1-401a-a633-129491e82b45" (UID: "cf85e6c3-1ea1-401a-a633-129491e82b45"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:30:16 crc kubenswrapper[4745]: I1208 00:30:16.397493 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/cf85e6c3-1ea1-401a-a633-129491e82b45-container-storage-root\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:16 crc kubenswrapper[4745]: I1208 00:30:16.914059 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-1-build" Dec 08 00:30:16 crc kubenswrapper[4745]: I1208 00:30:16.944338 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Dec 08 00:30:16 crc kubenswrapper[4745]: I1208 00:30:16.952629 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.202627 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-webhook-snmp-2-build"] Dec 08 00:30:17 crc kubenswrapper[4745]: E1208 00:30:17.202941 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf85e6c3-1ea1-401a-a633-129491e82b45" containerName="manage-dockerfile" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.202957 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf85e6c3-1ea1-401a-a633-129491e82b45" containerName="manage-dockerfile" Dec 08 00:30:17 crc kubenswrapper[4745]: E1208 00:30:17.202966 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf85e6c3-1ea1-401a-a633-129491e82b45" containerName="docker-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.202974 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf85e6c3-1ea1-401a-a633-129491e82b45" containerName="docker-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.203091 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf85e6c3-1ea1-401a-a633-129491e82b45" containerName="docker-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.204055 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.206438 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-vzj4m" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.207031 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-2-ca" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.210665 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-2-global-ca" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.220290 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-2-sys-config" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.237247 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-2-build"] Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.310079 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-container-storage-run\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.310338 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-blob-cache\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " 
pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.310476 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/f360d7c8-5fad-4800-853f-6a91627ed6b9-buildcachedir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.310620 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.310742 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p992r\" (UniqueName: \"kubernetes.io/projected/f360d7c8-5fad-4800-853f-6a91627ed6b9-kube-api-access-p992r\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.310851 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/f360d7c8-5fad-4800-853f-6a91627ed6b9-builder-dockercfg-vzj4m-push\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.310979 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-container-storage-root\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.311115 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/f360d7c8-5fad-4800-853f-6a91627ed6b9-builder-dockercfg-vzj4m-pull\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.311218 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.311349 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-buildworkdir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc 
kubenswrapper[4745]: I1208 00:30:17.311474 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f360d7c8-5fad-4800-853f-6a91627ed6b9-node-pullsecrets\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.311648 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-system-configs\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.413365 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f360d7c8-5fad-4800-853f-6a91627ed6b9-node-pullsecrets\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.413416 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-system-configs\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.413449 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-container-storage-run\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.413477 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-blob-cache\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.413514 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/f360d7c8-5fad-4800-853f-6a91627ed6b9-buildcachedir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.413514 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f360d7c8-5fad-4800-853f-6a91627ed6b9-node-pullsecrets\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.413547 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-ca-bundles\") pod 
\"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.413626 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p992r\" (UniqueName: \"kubernetes.io/projected/f360d7c8-5fad-4800-853f-6a91627ed6b9-kube-api-access-p992r\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.413658 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/f360d7c8-5fad-4800-853f-6a91627ed6b9-builder-dockercfg-vzj4m-push\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.413694 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-container-storage-root\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.413784 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/f360d7c8-5fad-4800-853f-6a91627ed6b9-builder-dockercfg-vzj4m-pull\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.413820 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.413879 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-buildworkdir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.414315 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-buildworkdir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.414524 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.414594 4745 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/f360d7c8-5fad-4800-853f-6a91627ed6b9-buildcachedir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.414810 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-container-storage-root\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.415068 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-blob-cache\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.415616 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-container-storage-run\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.415977 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.416181 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-system-configs\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.420087 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/f360d7c8-5fad-4800-853f-6a91627ed6b9-builder-dockercfg-vzj4m-pull\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.422890 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/f360d7c8-5fad-4800-853f-6a91627ed6b9-builder-dockercfg-vzj4m-push\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.432610 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p992r\" (UniqueName: \"kubernetes.io/projected/f360d7c8-5fad-4800-853f-6a91627ed6b9-kube-api-access-p992r\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " 
pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:17 crc kubenswrapper[4745]: I1208 00:30:17.523966 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:30:18 crc kubenswrapper[4745]: I1208 00:30:18.078172 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-2-build"] Dec 08 00:30:18 crc kubenswrapper[4745]: I1208 00:30:18.894214 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf85e6c3-1ea1-401a-a633-129491e82b45" path="/var/lib/kubelet/pods/cf85e6c3-1ea1-401a-a633-129491e82b45/volumes" Dec 08 00:30:18 crc kubenswrapper[4745]: I1208 00:30:18.934779 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"f360d7c8-5fad-4800-853f-6a91627ed6b9","Type":"ContainerStarted","Data":"7296235e1fd48b0a721f867daa0ea36d00999b6ef66323df6140df9de626bdc4"} Dec 08 00:30:19 crc kubenswrapper[4745]: I1208 00:30:19.946222 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"f360d7c8-5fad-4800-853f-6a91627ed6b9","Type":"ContainerStarted","Data":"56192ab777b1f3b932c8ae2fb0d60bd858f9f9cd22911ec74e04c539e5bc62e1"} Dec 08 00:30:20 crc kubenswrapper[4745]: I1208 00:30:20.817448 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:20 crc kubenswrapper[4745]: I1208 00:30:20.817519 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:20 crc kubenswrapper[4745]: I1208 00:30:20.953050 4745 generic.go:334] "Generic (PLEG): container finished" podID="f360d7c8-5fad-4800-853f-6a91627ed6b9" containerID="56192ab777b1f3b932c8ae2fb0d60bd858f9f9cd22911ec74e04c539e5bc62e1" exitCode=0 Dec 08 00:30:20 crc kubenswrapper[4745]: I1208 00:30:20.953103 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"f360d7c8-5fad-4800-853f-6a91627ed6b9","Type":"ContainerDied","Data":"56192ab777b1f3b932c8ae2fb0d60bd858f9f9cd22911ec74e04c539e5bc62e1"} Dec 08 00:30:21 crc kubenswrapper[4745]: I1208 00:30:21.873400 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-p88z2" podUID="5340212b-fd78-48fd-87a9-6c141fa5e516" containerName="registry-server" probeResult="failure" output=< Dec 08 00:30:21 crc kubenswrapper[4745]: timeout: failed to connect service ":50051" within 1s Dec 08 00:30:21 crc kubenswrapper[4745]: > Dec 08 00:30:21 crc kubenswrapper[4745]: I1208 00:30:21.963611 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"f360d7c8-5fad-4800-853f-6a91627ed6b9","Type":"ContainerStarted","Data":"186f9ca751e27944dfe226985fd9072a71e5131d331e4537c0766423c261745f"} Dec 08 00:30:22 crc kubenswrapper[4745]: I1208 00:30:22.000853 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-webhook-snmp-2-build_f360d7c8-5fad-4800-853f-6a91627ed6b9/manage-dockerfile/0.log" Dec 08 00:30:22 crc kubenswrapper[4745]: I1208 00:30:22.461004 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:30:22 crc kubenswrapper[4745]: I1208 00:30:22.461082 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:30:22 crc kubenswrapper[4745]: I1208 00:30:22.461140 4745 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:30:22 crc kubenswrapper[4745]: I1208 00:30:22.461899 4745 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"aa3944eefb5c403e042888407a760b9ae69ac970a839ac450c44f0d8351dbb2c"} pod="openshift-machine-config-operator/machine-config-daemon-6czdv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 00:30:22 crc kubenswrapper[4745]: I1208 00:30:22.462040 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" containerID="cri-o://aa3944eefb5c403e042888407a760b9ae69ac970a839ac450c44f0d8351dbb2c" gracePeriod=600 Dec 08 00:30:22 crc kubenswrapper[4745]: I1208 00:30:22.973130 4745 generic.go:334] "Generic (PLEG): container finished" podID="f360d7c8-5fad-4800-853f-6a91627ed6b9" containerID="186f9ca751e27944dfe226985fd9072a71e5131d331e4537c0766423c261745f" exitCode=0 Dec 08 00:30:22 crc kubenswrapper[4745]: I1208 00:30:22.973187 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"f360d7c8-5fad-4800-853f-6a91627ed6b9","Type":"ContainerDied","Data":"186f9ca751e27944dfe226985fd9072a71e5131d331e4537c0766423c261745f"} Dec 08 00:30:22 crc kubenswrapper[4745]: I1208 00:30:22.973228 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"f360d7c8-5fad-4800-853f-6a91627ed6b9","Type":"ContainerStarted","Data":"7b26093857378705963c278f0118ac927a1a71802b8a269f32e0c520546bae8b"} Dec 08 00:30:23 crc kubenswrapper[4745]: I1208 00:30:23.006804 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-webhook-snmp-2-build" podStartSLOduration=6.006783951 podStartE2EDuration="6.006783951s" podCreationTimestamp="2025-12-08 00:30:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:30:23.003141373 +0000 UTC m=+1378.432347733" watchObservedRunningTime="2025-12-08 00:30:23.006783951 +0000 UTC m=+1378.435990251" Dec 08 00:30:25 crc kubenswrapper[4745]: I1208 00:30:25.999207 4745 generic.go:334] "Generic (PLEG): container finished" podID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerID="aa3944eefb5c403e042888407a760b9ae69ac970a839ac450c44f0d8351dbb2c" exitCode=0 Dec 08 00:30:25 crc kubenswrapper[4745]: I1208 00:30:25.999450 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" 
event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerDied","Data":"aa3944eefb5c403e042888407a760b9ae69ac970a839ac450c44f0d8351dbb2c"} Dec 08 00:30:25 crc kubenswrapper[4745]: I1208 00:30:25.999882 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerStarted","Data":"70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0"} Dec 08 00:30:25 crc kubenswrapper[4745]: I1208 00:30:25.999904 4745 scope.go:117] "RemoveContainer" containerID="a443b65ce200f6a13a0367075e2dfb76f0fa7985f33955340034550b3bfdf67e" Dec 08 00:30:30 crc kubenswrapper[4745]: I1208 00:30:30.864915 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:30 crc kubenswrapper[4745]: I1208 00:30:30.931973 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:31 crc kubenswrapper[4745]: I1208 00:30:31.110367 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p88z2"] Dec 08 00:30:32 crc kubenswrapper[4745]: I1208 00:30:32.060049 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-p88z2" podUID="5340212b-fd78-48fd-87a9-6c141fa5e516" containerName="registry-server" containerID="cri-o://4c43b75f2755d00016ea7edf3affb008106dc64bab0f50badcb2867f7110860b" gracePeriod=2 Dec 08 00:30:33 crc kubenswrapper[4745]: I1208 00:30:33.072735 4745 generic.go:334] "Generic (PLEG): container finished" podID="5340212b-fd78-48fd-87a9-6c141fa5e516" containerID="4c43b75f2755d00016ea7edf3affb008106dc64bab0f50badcb2867f7110860b" exitCode=0 Dec 08 00:30:33 crc kubenswrapper[4745]: I1208 00:30:33.072853 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p88z2" event={"ID":"5340212b-fd78-48fd-87a9-6c141fa5e516","Type":"ContainerDied","Data":"4c43b75f2755d00016ea7edf3affb008106dc64bab0f50badcb2867f7110860b"} Dec 08 00:30:33 crc kubenswrapper[4745]: I1208 00:30:33.637315 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:33 crc kubenswrapper[4745]: I1208 00:30:33.809231 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5340212b-fd78-48fd-87a9-6c141fa5e516-utilities\") pod \"5340212b-fd78-48fd-87a9-6c141fa5e516\" (UID: \"5340212b-fd78-48fd-87a9-6c141fa5e516\") " Dec 08 00:30:33 crc kubenswrapper[4745]: I1208 00:30:33.809295 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnnxj\" (UniqueName: \"kubernetes.io/projected/5340212b-fd78-48fd-87a9-6c141fa5e516-kube-api-access-fnnxj\") pod \"5340212b-fd78-48fd-87a9-6c141fa5e516\" (UID: \"5340212b-fd78-48fd-87a9-6c141fa5e516\") " Dec 08 00:30:33 crc kubenswrapper[4745]: I1208 00:30:33.809415 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5340212b-fd78-48fd-87a9-6c141fa5e516-catalog-content\") pod \"5340212b-fd78-48fd-87a9-6c141fa5e516\" (UID: \"5340212b-fd78-48fd-87a9-6c141fa5e516\") " Dec 08 00:30:33 crc kubenswrapper[4745]: I1208 00:30:33.811376 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5340212b-fd78-48fd-87a9-6c141fa5e516-utilities" (OuterVolumeSpecName: "utilities") pod "5340212b-fd78-48fd-87a9-6c141fa5e516" (UID: "5340212b-fd78-48fd-87a9-6c141fa5e516"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:30:33 crc kubenswrapper[4745]: I1208 00:30:33.825351 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5340212b-fd78-48fd-87a9-6c141fa5e516-kube-api-access-fnnxj" (OuterVolumeSpecName: "kube-api-access-fnnxj") pod "5340212b-fd78-48fd-87a9-6c141fa5e516" (UID: "5340212b-fd78-48fd-87a9-6c141fa5e516"). InnerVolumeSpecName "kube-api-access-fnnxj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:30:33 crc kubenswrapper[4745]: I1208 00:30:33.912322 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5340212b-fd78-48fd-87a9-6c141fa5e516-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:33 crc kubenswrapper[4745]: I1208 00:30:33.912410 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnnxj\" (UniqueName: \"kubernetes.io/projected/5340212b-fd78-48fd-87a9-6c141fa5e516-kube-api-access-fnnxj\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:33 crc kubenswrapper[4745]: I1208 00:30:33.967567 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5340212b-fd78-48fd-87a9-6c141fa5e516-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5340212b-fd78-48fd-87a9-6c141fa5e516" (UID: "5340212b-fd78-48fd-87a9-6c141fa5e516"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:30:34 crc kubenswrapper[4745]: I1208 00:30:34.014098 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5340212b-fd78-48fd-87a9-6c141fa5e516-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:30:34 crc kubenswrapper[4745]: I1208 00:30:34.086683 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p88z2" event={"ID":"5340212b-fd78-48fd-87a9-6c141fa5e516","Type":"ContainerDied","Data":"674360d9c2605f482f8175cfc95434b8b012fd962391035c3ed10607fb3a3edd"} Dec 08 00:30:34 crc kubenswrapper[4745]: I1208 00:30:34.086772 4745 scope.go:117] "RemoveContainer" containerID="4c43b75f2755d00016ea7edf3affb008106dc64bab0f50badcb2867f7110860b" Dec 08 00:30:34 crc kubenswrapper[4745]: I1208 00:30:34.086834 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p88z2" Dec 08 00:30:34 crc kubenswrapper[4745]: I1208 00:30:34.115379 4745 scope.go:117] "RemoveContainer" containerID="078639f651a7137c18f06be20bddd2501dbcc1bb2f366bb6afa474ce97d20a56" Dec 08 00:30:34 crc kubenswrapper[4745]: I1208 00:30:34.131114 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p88z2"] Dec 08 00:30:34 crc kubenswrapper[4745]: I1208 00:30:34.146833 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-p88z2"] Dec 08 00:30:34 crc kubenswrapper[4745]: I1208 00:30:34.148743 4745 scope.go:117] "RemoveContainer" containerID="94aae01098fb0a0c03f57a14e51a7b5c9d7dae7777a6b8a34728399d440e0d1c" Dec 08 00:30:34 crc kubenswrapper[4745]: I1208 00:30:34.893080 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5340212b-fd78-48fd-87a9-6c141fa5e516" path="/var/lib/kubelet/pods/5340212b-fd78-48fd-87a9-6c141fa5e516/volumes" Dec 08 00:31:24 crc kubenswrapper[4745]: I1208 00:31:24.489720 4745 generic.go:334] "Generic (PLEG): container finished" podID="f360d7c8-5fad-4800-853f-6a91627ed6b9" containerID="7b26093857378705963c278f0118ac927a1a71802b8a269f32e0c520546bae8b" exitCode=0 Dec 08 00:31:24 crc kubenswrapper[4745]: I1208 00:31:24.489817 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"f360d7c8-5fad-4800-853f-6a91627ed6b9","Type":"ContainerDied","Data":"7b26093857378705963c278f0118ac927a1a71802b8a269f32e0c520546bae8b"} Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.853364 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.910454 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/f360d7c8-5fad-4800-853f-6a91627ed6b9-builder-dockercfg-vzj4m-pull\") pod \"f360d7c8-5fad-4800-853f-6a91627ed6b9\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.910542 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-blob-cache\") pod \"f360d7c8-5fad-4800-853f-6a91627ed6b9\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.910596 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f360d7c8-5fad-4800-853f-6a91627ed6b9-node-pullsecrets\") pod \"f360d7c8-5fad-4800-853f-6a91627ed6b9\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.910630 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-system-configs\") pod \"f360d7c8-5fad-4800-853f-6a91627ed6b9\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.910677 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-ca-bundles\") pod \"f360d7c8-5fad-4800-853f-6a91627ed6b9\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.910812 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-container-storage-root\") pod \"f360d7c8-5fad-4800-853f-6a91627ed6b9\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.910852 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-container-storage-run\") pod \"f360d7c8-5fad-4800-853f-6a91627ed6b9\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.910909 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-proxy-ca-bundles\") pod \"f360d7c8-5fad-4800-853f-6a91627ed6b9\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.910967 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-buildworkdir\") pod \"f360d7c8-5fad-4800-853f-6a91627ed6b9\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.911013 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p992r\" (UniqueName: 
\"kubernetes.io/projected/f360d7c8-5fad-4800-853f-6a91627ed6b9-kube-api-access-p992r\") pod \"f360d7c8-5fad-4800-853f-6a91627ed6b9\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.911072 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/f360d7c8-5fad-4800-853f-6a91627ed6b9-buildcachedir\") pod \"f360d7c8-5fad-4800-853f-6a91627ed6b9\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.911121 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/f360d7c8-5fad-4800-853f-6a91627ed6b9-builder-dockercfg-vzj4m-push\") pod \"f360d7c8-5fad-4800-853f-6a91627ed6b9\" (UID: \"f360d7c8-5fad-4800-853f-6a91627ed6b9\") " Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.916390 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "f360d7c8-5fad-4800-853f-6a91627ed6b9" (UID: "f360d7c8-5fad-4800-853f-6a91627ed6b9"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.919828 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "f360d7c8-5fad-4800-853f-6a91627ed6b9" (UID: "f360d7c8-5fad-4800-853f-6a91627ed6b9"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.919892 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f360d7c8-5fad-4800-853f-6a91627ed6b9-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "f360d7c8-5fad-4800-853f-6a91627ed6b9" (UID: "f360d7c8-5fad-4800-853f-6a91627ed6b9"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.920454 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "f360d7c8-5fad-4800-853f-6a91627ed6b9" (UID: "f360d7c8-5fad-4800-853f-6a91627ed6b9"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.920487 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f360d7c8-5fad-4800-853f-6a91627ed6b9-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "f360d7c8-5fad-4800-853f-6a91627ed6b9" (UID: "f360d7c8-5fad-4800-853f-6a91627ed6b9"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.922220 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f360d7c8-5fad-4800-853f-6a91627ed6b9-builder-dockercfg-vzj4m-pull" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-pull") pod "f360d7c8-5fad-4800-853f-6a91627ed6b9" (UID: "f360d7c8-5fad-4800-853f-6a91627ed6b9"). 
InnerVolumeSpecName "builder-dockercfg-vzj4m-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.922743 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f360d7c8-5fad-4800-853f-6a91627ed6b9-kube-api-access-p992r" (OuterVolumeSpecName: "kube-api-access-p992r") pod "f360d7c8-5fad-4800-853f-6a91627ed6b9" (UID: "f360d7c8-5fad-4800-853f-6a91627ed6b9"). InnerVolumeSpecName "kube-api-access-p992r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.924126 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "f360d7c8-5fad-4800-853f-6a91627ed6b9" (UID: "f360d7c8-5fad-4800-853f-6a91627ed6b9"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.925413 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f360d7c8-5fad-4800-853f-6a91627ed6b9-builder-dockercfg-vzj4m-push" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-push") pod "f360d7c8-5fad-4800-853f-6a91627ed6b9" (UID: "f360d7c8-5fad-4800-853f-6a91627ed6b9"). InnerVolumeSpecName "builder-dockercfg-vzj4m-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:31:25 crc kubenswrapper[4745]: I1208 00:31:25.926090 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "f360d7c8-5fad-4800-853f-6a91627ed6b9" (UID: "f360d7c8-5fad-4800-853f-6a91627ed6b9"). InnerVolumeSpecName "container-storage-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:31:26 crc kubenswrapper[4745]: I1208 00:31:26.013329 4745 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f360d7c8-5fad-4800-853f-6a91627ed6b9-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:26 crc kubenswrapper[4745]: I1208 00:31:26.013384 4745 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-system-configs\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:26 crc kubenswrapper[4745]: I1208 00:31:26.013407 4745 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:26 crc kubenswrapper[4745]: I1208 00:31:26.013430 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-container-storage-run\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:26 crc kubenswrapper[4745]: I1208 00:31:26.013454 4745 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:26 crc kubenswrapper[4745]: I1208 00:31:26.013478 4745 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-buildworkdir\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:26 crc kubenswrapper[4745]: I1208 00:31:26.013501 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p992r\" (UniqueName: \"kubernetes.io/projected/f360d7c8-5fad-4800-853f-6a91627ed6b9-kube-api-access-p992r\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:26 crc kubenswrapper[4745]: I1208 00:31:26.013527 4745 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/f360d7c8-5fad-4800-853f-6a91627ed6b9-buildcachedir\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:26 crc kubenswrapper[4745]: I1208 00:31:26.013548 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/f360d7c8-5fad-4800-853f-6a91627ed6b9-builder-dockercfg-vzj4m-push\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:26 crc kubenswrapper[4745]: I1208 00:31:26.013566 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/f360d7c8-5fad-4800-853f-6a91627ed6b9-builder-dockercfg-vzj4m-pull\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:26 crc kubenswrapper[4745]: I1208 00:31:26.015162 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "f360d7c8-5fad-4800-853f-6a91627ed6b9" (UID: "f360d7c8-5fad-4800-853f-6a91627ed6b9"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:31:26 crc kubenswrapper[4745]: I1208 00:31:26.116120 4745 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-build-blob-cache\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:26 crc kubenswrapper[4745]: I1208 00:31:26.509900 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"f360d7c8-5fad-4800-853f-6a91627ed6b9","Type":"ContainerDied","Data":"7296235e1fd48b0a721f867daa0ea36d00999b6ef66323df6140df9de626bdc4"} Dec 08 00:31:26 crc kubenswrapper[4745]: I1208 00:31:26.510288 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7296235e1fd48b0a721f867daa0ea36d00999b6ef66323df6140df9de626bdc4" Dec 08 00:31:26 crc kubenswrapper[4745]: I1208 00:31:26.510413 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-2-build" Dec 08 00:31:27 crc kubenswrapper[4745]: I1208 00:31:27.153984 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "f360d7c8-5fad-4800-853f-6a91627ed6b9" (UID: "f360d7c8-5fad-4800-853f-6a91627ed6b9"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:31:27 crc kubenswrapper[4745]: I1208 00:31:27.234232 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/f360d7c8-5fad-4800-853f-6a91627ed6b9-container-storage-root\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.409034 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-1-build"] Dec 08 00:31:36 crc kubenswrapper[4745]: E1208 00:31:36.409893 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5340212b-fd78-48fd-87a9-6c141fa5e516" containerName="extract-utilities" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.409909 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="5340212b-fd78-48fd-87a9-6c141fa5e516" containerName="extract-utilities" Dec 08 00:31:36 crc kubenswrapper[4745]: E1208 00:31:36.409921 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f360d7c8-5fad-4800-853f-6a91627ed6b9" containerName="docker-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.409984 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f360d7c8-5fad-4800-853f-6a91627ed6b9" containerName="docker-build" Dec 08 00:31:36 crc kubenswrapper[4745]: E1208 00:31:36.409999 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5340212b-fd78-48fd-87a9-6c141fa5e516" containerName="registry-server" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.410007 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="5340212b-fd78-48fd-87a9-6c141fa5e516" containerName="registry-server" Dec 08 00:31:36 crc kubenswrapper[4745]: E1208 00:31:36.410017 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f360d7c8-5fad-4800-853f-6a91627ed6b9" containerName="git-clone" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.410024 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f360d7c8-5fad-4800-853f-6a91627ed6b9" 
containerName="git-clone" Dec 08 00:31:36 crc kubenswrapper[4745]: E1208 00:31:36.410036 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5340212b-fd78-48fd-87a9-6c141fa5e516" containerName="extract-content" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.410043 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="5340212b-fd78-48fd-87a9-6c141fa5e516" containerName="extract-content" Dec 08 00:31:36 crc kubenswrapper[4745]: E1208 00:31:36.410057 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f360d7c8-5fad-4800-853f-6a91627ed6b9" containerName="manage-dockerfile" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.410065 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f360d7c8-5fad-4800-853f-6a91627ed6b9" containerName="manage-dockerfile" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.410272 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="f360d7c8-5fad-4800-853f-6a91627ed6b9" containerName="docker-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.410289 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="5340212b-fd78-48fd-87a9-6c141fa5e516" containerName="registry-server" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.411089 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.413026 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-1-ca" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.413127 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-1-sys-config" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.413846 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-1-global-ca" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.415290 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-vzj4m" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.425082 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-1-build"] Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.591044 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-system-configs\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.591858 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b2039dd9-dac4-49a2-a91d-b3883a785ced-buildcachedir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.591979 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: 
\"kubernetes.io/secret/b2039dd9-dac4-49a2-a91d-b3883a785ced-builder-dockercfg-vzj4m-pull\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.592154 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-blob-cache\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.592352 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b2039dd9-dac4-49a2-a91d-b3883a785ced-node-pullsecrets\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.592444 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-buildworkdir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.592566 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdfsb\" (UniqueName: \"kubernetes.io/projected/b2039dd9-dac4-49a2-a91d-b3883a785ced-kube-api-access-wdfsb\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.592664 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-container-storage-run\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.592745 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/b2039dd9-dac4-49a2-a91d-b3883a785ced-builder-dockercfg-vzj4m-push\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.592866 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.592969 4745 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-container-storage-root\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.593073 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.694284 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/b2039dd9-dac4-49a2-a91d-b3883a785ced-builder-dockercfg-vzj4m-pull\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.694370 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-blob-cache\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.694413 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b2039dd9-dac4-49a2-a91d-b3883a785ced-node-pullsecrets\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.694448 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-buildworkdir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.694505 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdfsb\" (UniqueName: \"kubernetes.io/projected/b2039dd9-dac4-49a2-a91d-b3883a785ced-kube-api-access-wdfsb\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.694551 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/b2039dd9-dac4-49a2-a91d-b3883a785ced-builder-dockercfg-vzj4m-push\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.694582 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-container-storage-run\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.694645 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.694676 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-container-storage-root\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.694716 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.694769 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-system-configs\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.694808 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b2039dd9-dac4-49a2-a91d-b3883a785ced-buildcachedir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.694950 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b2039dd9-dac4-49a2-a91d-b3883a785ced-buildcachedir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.695577 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-blob-cache\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.695660 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b2039dd9-dac4-49a2-a91d-b3883a785ced-node-pullsecrets\") pod 
\"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.696014 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-buildworkdir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.696394 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-container-storage-root\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.697275 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.697884 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-container-storage-run\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.698029 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.698249 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-system-configs\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.704063 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/b2039dd9-dac4-49a2-a91d-b3883a785ced-builder-dockercfg-vzj4m-pull\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.704086 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/b2039dd9-dac4-49a2-a91d-b3883a785ced-builder-dockercfg-vzj4m-push\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " 
pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.727823 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdfsb\" (UniqueName: \"kubernetes.io/projected/b2039dd9-dac4-49a2-a91d-b3883a785ced-kube-api-access-wdfsb\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.732644 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:36 crc kubenswrapper[4745]: I1208 00:31:36.985461 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-1-build"] Dec 08 00:31:37 crc kubenswrapper[4745]: I1208 00:31:37.629298 4745 generic.go:334] "Generic (PLEG): container finished" podID="b2039dd9-dac4-49a2-a91d-b3883a785ced" containerID="68df80bad1ccea0dc5886a0dfd78203f52022c1103851b7c803e177409926d0f" exitCode=0 Dec 08 00:31:37 crc kubenswrapper[4745]: I1208 00:31:37.629415 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-1-build" event={"ID":"b2039dd9-dac4-49a2-a91d-b3883a785ced","Type":"ContainerDied","Data":"68df80bad1ccea0dc5886a0dfd78203f52022c1103851b7c803e177409926d0f"} Dec 08 00:31:37 crc kubenswrapper[4745]: I1208 00:31:37.629836 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-1-build" event={"ID":"b2039dd9-dac4-49a2-a91d-b3883a785ced","Type":"ContainerStarted","Data":"efc31f62b4458d5ae3b304c24aa92221e1e96fb958477051005efce8fc107315"} Dec 08 00:31:38 crc kubenswrapper[4745]: I1208 00:31:38.646483 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-bundle-1-build_b2039dd9-dac4-49a2-a91d-b3883a785ced/docker-build/0.log" Dec 08 00:31:38 crc kubenswrapper[4745]: I1208 00:31:38.648979 4745 generic.go:334] "Generic (PLEG): container finished" podID="b2039dd9-dac4-49a2-a91d-b3883a785ced" containerID="bf4007db0f862a684c66b44d965ed67efd116568842790dd2ac76f5cfb1a0c00" exitCode=1 Dec 08 00:31:38 crc kubenswrapper[4745]: I1208 00:31:38.649021 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-1-build" event={"ID":"b2039dd9-dac4-49a2-a91d-b3883a785ced","Type":"ContainerDied","Data":"bf4007db0f862a684c66b44d965ed67efd116568842790dd2ac76f5cfb1a0c00"} Dec 08 00:31:39 crc kubenswrapper[4745]: I1208 00:31:39.940738 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-bundle-1-build_b2039dd9-dac4-49a2-a91d-b3883a785ced/docker-build/0.log" Dec 08 00:31:39 crc kubenswrapper[4745]: I1208 00:31:39.941723 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.146154 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-buildworkdir\") pod \"b2039dd9-dac4-49a2-a91d-b3883a785ced\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.146435 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-container-storage-run\") pod \"b2039dd9-dac4-49a2-a91d-b3883a785ced\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.146550 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b2039dd9-dac4-49a2-a91d-b3883a785ced-node-pullsecrets\") pod \"b2039dd9-dac4-49a2-a91d-b3883a785ced\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.146661 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-container-storage-root\") pod \"b2039dd9-dac4-49a2-a91d-b3883a785ced\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.146758 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-ca-bundles\") pod \"b2039dd9-dac4-49a2-a91d-b3883a785ced\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.146840 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/b2039dd9-dac4-49a2-a91d-b3883a785ced-builder-dockercfg-vzj4m-pull\") pod \"b2039dd9-dac4-49a2-a91d-b3883a785ced\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.146663 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b2039dd9-dac4-49a2-a91d-b3883a785ced-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "b2039dd9-dac4-49a2-a91d-b3883a785ced" (UID: "b2039dd9-dac4-49a2-a91d-b3883a785ced"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.146949 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdfsb\" (UniqueName: \"kubernetes.io/projected/b2039dd9-dac4-49a2-a91d-b3883a785ced-kube-api-access-wdfsb\") pod \"b2039dd9-dac4-49a2-a91d-b3883a785ced\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.147104 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-blob-cache\") pod \"b2039dd9-dac4-49a2-a91d-b3883a785ced\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.147165 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b2039dd9-dac4-49a2-a91d-b3883a785ced-buildcachedir\") pod \"b2039dd9-dac4-49a2-a91d-b3883a785ced\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.147220 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/b2039dd9-dac4-49a2-a91d-b3883a785ced-builder-dockercfg-vzj4m-push\") pod \"b2039dd9-dac4-49a2-a91d-b3883a785ced\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.147272 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-proxy-ca-bundles\") pod \"b2039dd9-dac4-49a2-a91d-b3883a785ced\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.147260 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b2039dd9-dac4-49a2-a91d-b3883a785ced-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "b2039dd9-dac4-49a2-a91d-b3883a785ced" (UID: "b2039dd9-dac4-49a2-a91d-b3883a785ced"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.147306 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-system-configs\") pod \"b2039dd9-dac4-49a2-a91d-b3883a785ced\" (UID: \"b2039dd9-dac4-49a2-a91d-b3883a785ced\") " Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.147332 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "b2039dd9-dac4-49a2-a91d-b3883a785ced" (UID: "b2039dd9-dac4-49a2-a91d-b3883a785ced"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.147488 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "b2039dd9-dac4-49a2-a91d-b3883a785ced" (UID: "b2039dd9-dac4-49a2-a91d-b3883a785ced"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.147633 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "b2039dd9-dac4-49a2-a91d-b3883a785ced" (UID: "b2039dd9-dac4-49a2-a91d-b3883a785ced"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.148218 4745 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b2039dd9-dac4-49a2-a91d-b3883a785ced-buildcachedir\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.148248 4745 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-buildworkdir\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.148263 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-container-storage-run\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.148280 4745 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b2039dd9-dac4-49a2-a91d-b3883a785ced-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.148292 4745 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-blob-cache\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.148257 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "b2039dd9-dac4-49a2-a91d-b3883a785ced" (UID: "b2039dd9-dac4-49a2-a91d-b3883a785ced"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.148528 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "b2039dd9-dac4-49a2-a91d-b3883a785ced" (UID: "b2039dd9-dac4-49a2-a91d-b3883a785ced"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.148535 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "b2039dd9-dac4-49a2-a91d-b3883a785ced" (UID: "b2039dd9-dac4-49a2-a91d-b3883a785ced"). InnerVolumeSpecName "build-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.149303 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "b2039dd9-dac4-49a2-a91d-b3883a785ced" (UID: "b2039dd9-dac4-49a2-a91d-b3883a785ced"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.153346 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2039dd9-dac4-49a2-a91d-b3883a785ced-kube-api-access-wdfsb" (OuterVolumeSpecName: "kube-api-access-wdfsb") pod "b2039dd9-dac4-49a2-a91d-b3883a785ced" (UID: "b2039dd9-dac4-49a2-a91d-b3883a785ced"). InnerVolumeSpecName "kube-api-access-wdfsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.154982 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2039dd9-dac4-49a2-a91d-b3883a785ced-builder-dockercfg-vzj4m-push" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-push") pod "b2039dd9-dac4-49a2-a91d-b3883a785ced" (UID: "b2039dd9-dac4-49a2-a91d-b3883a785ced"). InnerVolumeSpecName "builder-dockercfg-vzj4m-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.156421 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2039dd9-dac4-49a2-a91d-b3883a785ced-builder-dockercfg-vzj4m-pull" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-pull") pod "b2039dd9-dac4-49a2-a91d-b3883a785ced" (UID: "b2039dd9-dac4-49a2-a91d-b3883a785ced"). InnerVolumeSpecName "builder-dockercfg-vzj4m-pull". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.249889 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b2039dd9-dac4-49a2-a91d-b3883a785ced-container-storage-root\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.249983 4745 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.250003 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/b2039dd9-dac4-49a2-a91d-b3883a785ced-builder-dockercfg-vzj4m-pull\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.250021 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdfsb\" (UniqueName: \"kubernetes.io/projected/b2039dd9-dac4-49a2-a91d-b3883a785ced-kube-api-access-wdfsb\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.250040 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/b2039dd9-dac4-49a2-a91d-b3883a785ced-builder-dockercfg-vzj4m-push\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.250057 4745 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.250074 4745 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b2039dd9-dac4-49a2-a91d-b3883a785ced-build-system-configs\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.668396 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-bundle-1-build_b2039dd9-dac4-49a2-a91d-b3883a785ced/docker-build/0.log" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.669286 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-1-build" event={"ID":"b2039dd9-dac4-49a2-a91d-b3883a785ced","Type":"ContainerDied","Data":"efc31f62b4458d5ae3b304c24aa92221e1e96fb958477051005efce8fc107315"} Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.669328 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efc31f62b4458d5ae3b304c24aa92221e1e96fb958477051005efce8fc107315" Dec 08 00:31:40 crc kubenswrapper[4745]: I1208 00:31:40.669401 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-1-build" Dec 08 00:31:46 crc kubenswrapper[4745]: I1208 00:31:46.979227 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-1-build"] Dec 08 00:31:46 crc kubenswrapper[4745]: I1208 00:31:46.988665 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-1-build"] Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.621324 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-2-build"] Dec 08 00:31:48 crc kubenswrapper[4745]: E1208 00:31:48.622193 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2039dd9-dac4-49a2-a91d-b3883a785ced" containerName="docker-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.622225 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2039dd9-dac4-49a2-a91d-b3883a785ced" containerName="docker-build" Dec 08 00:31:48 crc kubenswrapper[4745]: E1208 00:31:48.622258 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2039dd9-dac4-49a2-a91d-b3883a785ced" containerName="manage-dockerfile" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.622276 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2039dd9-dac4-49a2-a91d-b3883a785ced" containerName="manage-dockerfile" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.622556 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2039dd9-dac4-49a2-a91d-b3883a785ced" containerName="docker-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.624474 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.626997 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-2-sys-config" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.627036 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-2-ca" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.627065 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-2-global-ca" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.644741 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-vzj4m" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.653893 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-2-build"] Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.787473 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-buildworkdir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.787519 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/072d0bb7-70b5-469d-836c-bc6723e9abab-builder-dockercfg-vzj4m-pull\") pod 
\"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.787544 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-system-configs\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.787700 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.787852 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/072d0bb7-70b5-469d-836c-bc6723e9abab-buildcachedir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.788003 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-container-storage-run\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.788058 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/072d0bb7-70b5-469d-836c-bc6723e9abab-builder-dockercfg-vzj4m-push\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.788183 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-build-blob-cache\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.788264 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/072d0bb7-70b5-469d-836c-bc6723e9abab-node-pullsecrets\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.788297 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: 
\"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-container-storage-root\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.788438 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqpzk\" (UniqueName: \"kubernetes.io/projected/072d0bb7-70b5-469d-836c-bc6723e9abab-kube-api-access-cqpzk\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.788510 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.889359 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/072d0bb7-70b5-469d-836c-bc6723e9abab-node-pullsecrets\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.889425 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-container-storage-root\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.889502 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqpzk\" (UniqueName: \"kubernetes.io/projected/072d0bb7-70b5-469d-836c-bc6723e9abab-kube-api-access-cqpzk\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.889521 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/072d0bb7-70b5-469d-836c-bc6723e9abab-node-pullsecrets\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.889554 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.889622 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: 
\"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-buildworkdir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.889685 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/072d0bb7-70b5-469d-836c-bc6723e9abab-builder-dockercfg-vzj4m-pull\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.889737 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-system-configs\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.889771 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.889810 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/072d0bb7-70b5-469d-836c-bc6723e9abab-buildcachedir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.889855 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-container-storage-run\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.889901 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/072d0bb7-70b5-469d-836c-bc6723e9abab-builder-dockercfg-vzj4m-push\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.889998 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-build-blob-cache\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.890528 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: 
\"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-container-storage-root\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.890693 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-build-blob-cache\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.890780 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/072d0bb7-70b5-469d-836c-bc6723e9abab-buildcachedir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.890953 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.891290 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-container-storage-run\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.891640 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-system-configs\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.891779 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-buildworkdir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.892076 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.892901 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2039dd9-dac4-49a2-a91d-b3883a785ced" path="/var/lib/kubelet/pods/b2039dd9-dac4-49a2-a91d-b3883a785ced/volumes" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.898385 4745 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/072d0bb7-70b5-469d-836c-bc6723e9abab-builder-dockercfg-vzj4m-pull\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.899053 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/072d0bb7-70b5-469d-836c-bc6723e9abab-builder-dockercfg-vzj4m-push\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.925821 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqpzk\" (UniqueName: \"kubernetes.io/projected/072d0bb7-70b5-469d-836c-bc6723e9abab-kube-api-access-cqpzk\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:48 crc kubenswrapper[4745]: I1208 00:31:48.955370 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:49 crc kubenswrapper[4745]: I1208 00:31:49.387472 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-2-build"] Dec 08 00:31:49 crc kubenswrapper[4745]: I1208 00:31:49.740062 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"072d0bb7-70b5-469d-836c-bc6723e9abab","Type":"ContainerStarted","Data":"589c13f251e3f4ee25ac330a702d8455634a9c7ae548910824a06c3679fb1af1"} Dec 08 00:31:50 crc kubenswrapper[4745]: I1208 00:31:50.751109 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"072d0bb7-70b5-469d-836c-bc6723e9abab","Type":"ContainerStarted","Data":"8e7e0bc639306ad4dda92af9d765b3b0c05ad5ebaf9dcda7d948404f7d7ead1c"} Dec 08 00:31:51 crc kubenswrapper[4745]: I1208 00:31:51.761344 4745 generic.go:334] "Generic (PLEG): container finished" podID="072d0bb7-70b5-469d-836c-bc6723e9abab" containerID="8e7e0bc639306ad4dda92af9d765b3b0c05ad5ebaf9dcda7d948404f7d7ead1c" exitCode=0 Dec 08 00:31:51 crc kubenswrapper[4745]: I1208 00:31:51.761406 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"072d0bb7-70b5-469d-836c-bc6723e9abab","Type":"ContainerDied","Data":"8e7e0bc639306ad4dda92af9d765b3b0c05ad5ebaf9dcda7d948404f7d7ead1c"} Dec 08 00:31:52 crc kubenswrapper[4745]: I1208 00:31:52.775668 4745 generic.go:334] "Generic (PLEG): container finished" podID="072d0bb7-70b5-469d-836c-bc6723e9abab" containerID="de1d15bc2cbea6c50f05ff13a5783f5917137f4fbaffae53f57061fffca86432" exitCode=0 Dec 08 00:31:52 crc kubenswrapper[4745]: I1208 00:31:52.775767 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"072d0bb7-70b5-469d-836c-bc6723e9abab","Type":"ContainerDied","Data":"de1d15bc2cbea6c50f05ff13a5783f5917137f4fbaffae53f57061fffca86432"} Dec 08 00:31:52 crc kubenswrapper[4745]: I1208 00:31:52.839795 4745 log.go:25] "Finished parsing log 
file" path="/var/log/pods/service-telemetry_service-telemetry-operator-bundle-2-build_072d0bb7-70b5-469d-836c-bc6723e9abab/manage-dockerfile/0.log" Dec 08 00:31:53 crc kubenswrapper[4745]: I1208 00:31:53.789788 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"072d0bb7-70b5-469d-836c-bc6723e9abab","Type":"ContainerStarted","Data":"3b47c6df019811f240ae0102e10d13118988ef6e6b3c261d6f6b15f6b4168ae4"} Dec 08 00:31:53 crc kubenswrapper[4745]: I1208 00:31:53.843088 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-bundle-2-build" podStartSLOduration=5.843055937 podStartE2EDuration="5.843055937s" podCreationTimestamp="2025-12-08 00:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:31:53.833692976 +0000 UTC m=+1469.262899316" watchObservedRunningTime="2025-12-08 00:31:53.843055937 +0000 UTC m=+1469.272262297" Dec 08 00:31:56 crc kubenswrapper[4745]: I1208 00:31:56.815687 4745 generic.go:334] "Generic (PLEG): container finished" podID="072d0bb7-70b5-469d-836c-bc6723e9abab" containerID="3b47c6df019811f240ae0102e10d13118988ef6e6b3c261d6f6b15f6b4168ae4" exitCode=0 Dec 08 00:31:56 crc kubenswrapper[4745]: I1208 00:31:56.815789 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"072d0bb7-70b5-469d-836c-bc6723e9abab","Type":"ContainerDied","Data":"3b47c6df019811f240ae0102e10d13118988ef6e6b3c261d6f6b15f6b4168ae4"} Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.174411 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.342110 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-system-configs\") pod \"072d0bb7-70b5-469d-836c-bc6723e9abab\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.342179 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-build-blob-cache\") pod \"072d0bb7-70b5-469d-836c-bc6723e9abab\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.342214 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-container-storage-root\") pod \"072d0bb7-70b5-469d-836c-bc6723e9abab\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.342259 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqpzk\" (UniqueName: \"kubernetes.io/projected/072d0bb7-70b5-469d-836c-bc6723e9abab-kube-api-access-cqpzk\") pod \"072d0bb7-70b5-469d-836c-bc6723e9abab\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.342301 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/072d0bb7-70b5-469d-836c-bc6723e9abab-buildcachedir\") pod \"072d0bb7-70b5-469d-836c-bc6723e9abab\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.342371 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-proxy-ca-bundles\") pod \"072d0bb7-70b5-469d-836c-bc6723e9abab\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.342402 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/072d0bb7-70b5-469d-836c-bc6723e9abab-builder-dockercfg-vzj4m-pull\") pod \"072d0bb7-70b5-469d-836c-bc6723e9abab\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.342462 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-ca-bundles\") pod \"072d0bb7-70b5-469d-836c-bc6723e9abab\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.342521 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-buildworkdir\") pod \"072d0bb7-70b5-469d-836c-bc6723e9abab\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.342571 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: 
\"kubernetes.io/host-path/072d0bb7-70b5-469d-836c-bc6723e9abab-node-pullsecrets\") pod \"072d0bb7-70b5-469d-836c-bc6723e9abab\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.342614 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-container-storage-run\") pod \"072d0bb7-70b5-469d-836c-bc6723e9abab\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.342644 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/072d0bb7-70b5-469d-836c-bc6723e9abab-builder-dockercfg-vzj4m-push\") pod \"072d0bb7-70b5-469d-836c-bc6723e9abab\" (UID: \"072d0bb7-70b5-469d-836c-bc6723e9abab\") " Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.343555 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "072d0bb7-70b5-469d-836c-bc6723e9abab" (UID: "072d0bb7-70b5-469d-836c-bc6723e9abab"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.343559 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "072d0bb7-70b5-469d-836c-bc6723e9abab" (UID: "072d0bb7-70b5-469d-836c-bc6723e9abab"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.344253 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/072d0bb7-70b5-469d-836c-bc6723e9abab-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "072d0bb7-70b5-469d-836c-bc6723e9abab" (UID: "072d0bb7-70b5-469d-836c-bc6723e9abab"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.345191 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "072d0bb7-70b5-469d-836c-bc6723e9abab" (UID: "072d0bb7-70b5-469d-836c-bc6723e9abab"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.345292 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/072d0bb7-70b5-469d-836c-bc6723e9abab-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "072d0bb7-70b5-469d-836c-bc6723e9abab" (UID: "072d0bb7-70b5-469d-836c-bc6723e9abab"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.345734 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "072d0bb7-70b5-469d-836c-bc6723e9abab" (UID: "072d0bb7-70b5-469d-836c-bc6723e9abab"). 
InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.348361 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/072d0bb7-70b5-469d-836c-bc6723e9abab-builder-dockercfg-vzj4m-push" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-push") pod "072d0bb7-70b5-469d-836c-bc6723e9abab" (UID: "072d0bb7-70b5-469d-836c-bc6723e9abab"). InnerVolumeSpecName "builder-dockercfg-vzj4m-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.348679 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "072d0bb7-70b5-469d-836c-bc6723e9abab" (UID: "072d0bb7-70b5-469d-836c-bc6723e9abab"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.348703 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/072d0bb7-70b5-469d-836c-bc6723e9abab-kube-api-access-cqpzk" (OuterVolumeSpecName: "kube-api-access-cqpzk") pod "072d0bb7-70b5-469d-836c-bc6723e9abab" (UID: "072d0bb7-70b5-469d-836c-bc6723e9abab"). InnerVolumeSpecName "kube-api-access-cqpzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.349231 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "072d0bb7-70b5-469d-836c-bc6723e9abab" (UID: "072d0bb7-70b5-469d-836c-bc6723e9abab"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.350726 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/072d0bb7-70b5-469d-836c-bc6723e9abab-builder-dockercfg-vzj4m-pull" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-pull") pod "072d0bb7-70b5-469d-836c-bc6723e9abab" (UID: "072d0bb7-70b5-469d-836c-bc6723e9abab"). InnerVolumeSpecName "builder-dockercfg-vzj4m-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.352090 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "072d0bb7-70b5-469d-836c-bc6723e9abab" (UID: "072d0bb7-70b5-469d-836c-bc6723e9abab"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.444525 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-container-storage-run\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.445341 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/072d0bb7-70b5-469d-836c-bc6723e9abab-builder-dockercfg-vzj4m-push\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.445477 4745 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-system-configs\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.445601 4745 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-build-blob-cache\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.445777 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-container-storage-root\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.446078 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqpzk\" (UniqueName: \"kubernetes.io/projected/072d0bb7-70b5-469d-836c-bc6723e9abab-kube-api-access-cqpzk\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.446250 4745 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/072d0bb7-70b5-469d-836c-bc6723e9abab-buildcachedir\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.446408 4745 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.446555 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/072d0bb7-70b5-469d-836c-bc6723e9abab-builder-dockercfg-vzj4m-pull\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.446685 4745 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/072d0bb7-70b5-469d-836c-bc6723e9abab-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.446824 4745 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/072d0bb7-70b5-469d-836c-bc6723e9abab-buildworkdir\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.447069 4745 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/072d0bb7-70b5-469d-836c-bc6723e9abab-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.832617 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"072d0bb7-70b5-469d-836c-bc6723e9abab","Type":"ContainerDied","Data":"589c13f251e3f4ee25ac330a702d8455634a9c7ae548910824a06c3679fb1af1"} Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.832658 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="589c13f251e3f4ee25ac330a702d8455634a9c7ae548910824a06c3679fb1af1" Dec 08 00:31:58 crc kubenswrapper[4745]: I1208 00:31:58.832696 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-2-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.604938 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-1-build"] Dec 08 00:32:02 crc kubenswrapper[4745]: E1208 00:32:02.605508 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="072d0bb7-70b5-469d-836c-bc6723e9abab" containerName="docker-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.605523 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="072d0bb7-70b5-469d-836c-bc6723e9abab" containerName="docker-build" Dec 08 00:32:02 crc kubenswrapper[4745]: E1208 00:32:02.605540 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="072d0bb7-70b5-469d-836c-bc6723e9abab" containerName="manage-dockerfile" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.605547 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="072d0bb7-70b5-469d-836c-bc6723e9abab" containerName="manage-dockerfile" Dec 08 00:32:02 crc kubenswrapper[4745]: E1208 00:32:02.605557 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="072d0bb7-70b5-469d-836c-bc6723e9abab" containerName="git-clone" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.605565 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="072d0bb7-70b5-469d-836c-bc6723e9abab" containerName="git-clone" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.605716 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="072d0bb7-70b5-469d-836c-bc6723e9abab" containerName="docker-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.606434 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.608949 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-1-ca" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.609421 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-1-global-ca" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.609443 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-vzj4m" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.609556 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-1-sys-config" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.623813 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-1-build"] Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.716060 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.716114 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-buildworkdir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.716152 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-blob-cache\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.716174 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc99m\" (UniqueName: \"kubernetes.io/projected/b82180f4-02d3-4023-b3a0-9f995c0497e7-kube-api-access-vc99m\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.716205 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/b82180f4-02d3-4023-b3a0-9f995c0497e7-builder-dockercfg-vzj4m-push\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.716230 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-container-storage-root\") pod 
\"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.716329 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/b82180f4-02d3-4023-b3a0-9f995c0497e7-builder-dockercfg-vzj4m-pull\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.716374 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b82180f4-02d3-4023-b3a0-9f995c0497e7-buildcachedir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.716437 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-system-configs\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.716493 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-container-storage-run\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.716523 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b82180f4-02d3-4023-b3a0-9f995c0497e7-node-pullsecrets\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.716549 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.818179 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-container-storage-run\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.818223 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b82180f4-02d3-4023-b3a0-9f995c0497e7-node-pullsecrets\") pod 
\"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.818242 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.818269 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.818294 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-buildworkdir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.818326 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-blob-cache\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.818341 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc99m\" (UniqueName: \"kubernetes.io/projected/b82180f4-02d3-4023-b3a0-9f995c0497e7-kube-api-access-vc99m\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.818374 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/b82180f4-02d3-4023-b3a0-9f995c0497e7-builder-dockercfg-vzj4m-push\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.818407 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-container-storage-root\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.818420 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b82180f4-02d3-4023-b3a0-9f995c0497e7-node-pullsecrets\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 
00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.818432 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/b82180f4-02d3-4023-b3a0-9f995c0497e7-builder-dockercfg-vzj4m-pull\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.818520 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b82180f4-02d3-4023-b3a0-9f995c0497e7-buildcachedir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.818587 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-system-configs\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.819113 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b82180f4-02d3-4023-b3a0-9f995c0497e7-buildcachedir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.819130 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.819446 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-container-storage-root\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.819462 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-container-storage-run\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.819552 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-system-configs\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.819580 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: 
\"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-buildworkdir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.819804 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-blob-cache\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.820755 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.824012 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/b82180f4-02d3-4023-b3a0-9f995c0497e7-builder-dockercfg-vzj4m-push\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.833540 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/b82180f4-02d3-4023-b3a0-9f995c0497e7-builder-dockercfg-vzj4m-pull\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.834159 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc99m\" (UniqueName: \"kubernetes.io/projected/b82180f4-02d3-4023-b3a0-9f995c0497e7-kube-api-access-vc99m\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:02 crc kubenswrapper[4745]: I1208 00:32:02.931427 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:03 crc kubenswrapper[4745]: I1208 00:32:03.157270 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-1-build"] Dec 08 00:32:03 crc kubenswrapper[4745]: I1208 00:32:03.870868 4745 generic.go:334] "Generic (PLEG): container finished" podID="b82180f4-02d3-4023-b3a0-9f995c0497e7" containerID="17835e59611acc16836138393fbd1c8a2746ac280b493b7e4742d08a8611e9de" exitCode=0 Dec 08 00:32:03 crc kubenswrapper[4745]: I1208 00:32:03.870965 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-1-build" event={"ID":"b82180f4-02d3-4023-b3a0-9f995c0497e7","Type":"ContainerDied","Data":"17835e59611acc16836138393fbd1c8a2746ac280b493b7e4742d08a8611e9de"} Dec 08 00:32:03 crc kubenswrapper[4745]: I1208 00:32:03.871357 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-1-build" event={"ID":"b82180f4-02d3-4023-b3a0-9f995c0497e7","Type":"ContainerStarted","Data":"83b463fa12006968c114b1e088db621287dba8b2eccbb76a3d285780fe341a68"} Dec 08 00:32:04 crc kubenswrapper[4745]: I1208 00:32:04.883322 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bundle-1-build_b82180f4-02d3-4023-b3a0-9f995c0497e7/docker-build/0.log" Dec 08 00:32:04 crc kubenswrapper[4745]: I1208 00:32:04.884474 4745 generic.go:334] "Generic (PLEG): container finished" podID="b82180f4-02d3-4023-b3a0-9f995c0497e7" containerID="bded26f29f0617bd4f0a6d44f392e603869b2f8d7c45c93c1eeb08f6be0d8c99" exitCode=1 Dec 08 00:32:04 crc kubenswrapper[4745]: I1208 00:32:04.893253 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-1-build" event={"ID":"b82180f4-02d3-4023-b3a0-9f995c0497e7","Type":"ContainerDied","Data":"bded26f29f0617bd4f0a6d44f392e603869b2f8d7c45c93c1eeb08f6be0d8c99"} Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.277287 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bundle-1-build_b82180f4-02d3-4023-b3a0-9f995c0497e7/docker-build/0.log" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.277882 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.368349 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b82180f4-02d3-4023-b3a0-9f995c0497e7-buildcachedir\") pod \"b82180f4-02d3-4023-b3a0-9f995c0497e7\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.368412 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-blob-cache\") pod \"b82180f4-02d3-4023-b3a0-9f995c0497e7\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.368443 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-container-storage-root\") pod \"b82180f4-02d3-4023-b3a0-9f995c0497e7\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.368476 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b82180f4-02d3-4023-b3a0-9f995c0497e7-node-pullsecrets\") pod \"b82180f4-02d3-4023-b3a0-9f995c0497e7\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.368502 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-container-storage-run\") pod \"b82180f4-02d3-4023-b3a0-9f995c0497e7\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.368494 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b82180f4-02d3-4023-b3a0-9f995c0497e7-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "b82180f4-02d3-4023-b3a0-9f995c0497e7" (UID: "b82180f4-02d3-4023-b3a0-9f995c0497e7"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.368536 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-buildworkdir\") pod \"b82180f4-02d3-4023-b3a0-9f995c0497e7\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.368567 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vc99m\" (UniqueName: \"kubernetes.io/projected/b82180f4-02d3-4023-b3a0-9f995c0497e7-kube-api-access-vc99m\") pod \"b82180f4-02d3-4023-b3a0-9f995c0497e7\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.368576 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b82180f4-02d3-4023-b3a0-9f995c0497e7-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "b82180f4-02d3-4023-b3a0-9f995c0497e7" (UID: "b82180f4-02d3-4023-b3a0-9f995c0497e7"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.368587 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/b82180f4-02d3-4023-b3a0-9f995c0497e7-builder-dockercfg-vzj4m-push\") pod \"b82180f4-02d3-4023-b3a0-9f995c0497e7\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.368640 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-ca-bundles\") pod \"b82180f4-02d3-4023-b3a0-9f995c0497e7\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.368671 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/b82180f4-02d3-4023-b3a0-9f995c0497e7-builder-dockercfg-vzj4m-pull\") pod \"b82180f4-02d3-4023-b3a0-9f995c0497e7\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.368694 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-proxy-ca-bundles\") pod \"b82180f4-02d3-4023-b3a0-9f995c0497e7\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.368719 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-system-configs\") pod \"b82180f4-02d3-4023-b3a0-9f995c0497e7\" (UID: \"b82180f4-02d3-4023-b3a0-9f995c0497e7\") " Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.369144 4745 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b82180f4-02d3-4023-b3a0-9f995c0497e7-buildcachedir\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.369587 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "b82180f4-02d3-4023-b3a0-9f995c0497e7" (UID: "b82180f4-02d3-4023-b3a0-9f995c0497e7"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.369617 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "b82180f4-02d3-4023-b3a0-9f995c0497e7" (UID: "b82180f4-02d3-4023-b3a0-9f995c0497e7"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.369664 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "b82180f4-02d3-4023-b3a0-9f995c0497e7" (UID: "b82180f4-02d3-4023-b3a0-9f995c0497e7"). InnerVolumeSpecName "build-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.369728 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "b82180f4-02d3-4023-b3a0-9f995c0497e7" (UID: "b82180f4-02d3-4023-b3a0-9f995c0497e7"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.369870 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "b82180f4-02d3-4023-b3a0-9f995c0497e7" (UID: "b82180f4-02d3-4023-b3a0-9f995c0497e7"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.370438 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "b82180f4-02d3-4023-b3a0-9f995c0497e7" (UID: "b82180f4-02d3-4023-b3a0-9f995c0497e7"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.371291 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "b82180f4-02d3-4023-b3a0-9f995c0497e7" (UID: "b82180f4-02d3-4023-b3a0-9f995c0497e7"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.374031 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b82180f4-02d3-4023-b3a0-9f995c0497e7-kube-api-access-vc99m" (OuterVolumeSpecName: "kube-api-access-vc99m") pod "b82180f4-02d3-4023-b3a0-9f995c0497e7" (UID: "b82180f4-02d3-4023-b3a0-9f995c0497e7"). InnerVolumeSpecName "kube-api-access-vc99m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.375015 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b82180f4-02d3-4023-b3a0-9f995c0497e7-builder-dockercfg-vzj4m-pull" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-pull") pod "b82180f4-02d3-4023-b3a0-9f995c0497e7" (UID: "b82180f4-02d3-4023-b3a0-9f995c0497e7"). InnerVolumeSpecName "builder-dockercfg-vzj4m-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.381061 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b82180f4-02d3-4023-b3a0-9f995c0497e7-builder-dockercfg-vzj4m-push" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-push") pod "b82180f4-02d3-4023-b3a0-9f995c0497e7" (UID: "b82180f4-02d3-4023-b3a0-9f995c0497e7"). InnerVolumeSpecName "builder-dockercfg-vzj4m-push". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.470341 4745 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-blob-cache\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.470411 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-container-storage-root\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.470432 4745 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b82180f4-02d3-4023-b3a0-9f995c0497e7-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.470450 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-container-storage-run\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.470469 4745 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b82180f4-02d3-4023-b3a0-9f995c0497e7-buildworkdir\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.470486 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vc99m\" (UniqueName: \"kubernetes.io/projected/b82180f4-02d3-4023-b3a0-9f995c0497e7-kube-api-access-vc99m\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.470503 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/b82180f4-02d3-4023-b3a0-9f995c0497e7-builder-dockercfg-vzj4m-push\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.470521 4745 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.470538 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/b82180f4-02d3-4023-b3a0-9f995c0497e7-builder-dockercfg-vzj4m-pull\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.470554 4745 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.470572 4745 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b82180f4-02d3-4023-b3a0-9f995c0497e7-build-system-configs\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.926291 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bundle-1-build_b82180f4-02d3-4023-b3a0-9f995c0497e7/docker-build/0.log" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.927283 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-1-build" 
event={"ID":"b82180f4-02d3-4023-b3a0-9f995c0497e7","Type":"ContainerDied","Data":"83b463fa12006968c114b1e088db621287dba8b2eccbb76a3d285780fe341a68"} Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.927349 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83b463fa12006968c114b1e088db621287dba8b2eccbb76a3d285780fe341a68" Dec 08 00:32:06 crc kubenswrapper[4745]: I1208 00:32:06.927440 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-1-build" Dec 08 00:32:13 crc kubenswrapper[4745]: I1208 00:32:13.711687 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-1-build"] Dec 08 00:32:13 crc kubenswrapper[4745]: I1208 00:32:13.721592 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-1-build"] Dec 08 00:32:14 crc kubenswrapper[4745]: I1208 00:32:14.904868 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b82180f4-02d3-4023-b3a0-9f995c0497e7" path="/var/lib/kubelet/pods/b82180f4-02d3-4023-b3a0-9f995c0497e7/volumes" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.797281 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-2-build"] Dec 08 00:32:15 crc kubenswrapper[4745]: E1208 00:32:15.797757 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b82180f4-02d3-4023-b3a0-9f995c0497e7" containerName="manage-dockerfile" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.797770 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="b82180f4-02d3-4023-b3a0-9f995c0497e7" containerName="manage-dockerfile" Dec 08 00:32:15 crc kubenswrapper[4745]: E1208 00:32:15.797790 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b82180f4-02d3-4023-b3a0-9f995c0497e7" containerName="docker-build" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.797796 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="b82180f4-02d3-4023-b3a0-9f995c0497e7" containerName="docker-build" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.797895 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="b82180f4-02d3-4023-b3a0-9f995c0497e7" containerName="docker-build" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.798660 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.802416 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-2-ca" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.802714 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-2-sys-config" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.802766 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-2-global-ca" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.802715 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-vzj4m" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.830164 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-2-build"] Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.917760 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-container-storage-run\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.917974 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/54fc67bf-1fad-458f-b734-50d52d8c6e36-buildcachedir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.918059 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-blob-cache\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.918177 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-buildworkdir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.918258 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t6xc\" (UniqueName: \"kubernetes.io/projected/54fc67bf-1fad-458f-b734-50d52d8c6e36-kube-api-access-7t6xc\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.918310 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" 
(UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.918371 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/54fc67bf-1fad-458f-b734-50d52d8c6e36-builder-dockercfg-vzj4m-push\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.918434 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-container-storage-root\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.918537 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.918585 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/54fc67bf-1fad-458f-b734-50d52d8c6e36-node-pullsecrets\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.918640 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/54fc67bf-1fad-458f-b734-50d52d8c6e36-builder-dockercfg-vzj4m-pull\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:15 crc kubenswrapper[4745]: I1208 00:32:15.918683 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-system-configs\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.019948 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-container-storage-root\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.020011 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-proxy-ca-bundles\") pod 
\"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.020036 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/54fc67bf-1fad-458f-b734-50d52d8c6e36-node-pullsecrets\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.020054 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/54fc67bf-1fad-458f-b734-50d52d8c6e36-builder-dockercfg-vzj4m-pull\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.020075 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-system-configs\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.020113 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-container-storage-run\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.020143 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/54fc67bf-1fad-458f-b734-50d52d8c6e36-buildcachedir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.020144 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/54fc67bf-1fad-458f-b734-50d52d8c6e36-node-pullsecrets\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.020159 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-blob-cache\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.020324 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-buildworkdir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc 
kubenswrapper[4745]: I1208 00:32:16.020341 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/54fc67bf-1fad-458f-b734-50d52d8c6e36-buildcachedir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.020362 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t6xc\" (UniqueName: \"kubernetes.io/projected/54fc67bf-1fad-458f-b734-50d52d8c6e36-kube-api-access-7t6xc\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.020413 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.020460 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/54fc67bf-1fad-458f-b734-50d52d8c6e36-builder-dockercfg-vzj4m-push\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.020884 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-buildworkdir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.020960 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-blob-cache\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.021190 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-container-storage-run\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.021220 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.021271 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: 
\"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-container-storage-root\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.021422 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-system-configs\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.021919 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.033382 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/54fc67bf-1fad-458f-b734-50d52d8c6e36-builder-dockercfg-vzj4m-push\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.033406 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/54fc67bf-1fad-458f-b734-50d52d8c6e36-builder-dockercfg-vzj4m-pull\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.036023 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t6xc\" (UniqueName: \"kubernetes.io/projected/54fc67bf-1fad-458f-b734-50d52d8c6e36-kube-api-access-7t6xc\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.131690 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:16 crc kubenswrapper[4745]: I1208 00:32:16.604860 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-2-build"] Dec 08 00:32:17 crc kubenswrapper[4745]: I1208 00:32:17.004133 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"54fc67bf-1fad-458f-b734-50d52d8c6e36","Type":"ContainerStarted","Data":"4673244a84eaf0a7eebe0d2ec4d21ff1689b5aa50e4fdb9536bab9c74b414a44"} Dec 08 00:32:17 crc kubenswrapper[4745]: I1208 00:32:17.004620 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"54fc67bf-1fad-458f-b734-50d52d8c6e36","Type":"ContainerStarted","Data":"04e9e484360c898240a1b42a6a4a15112ee4b80fa73b81820e413002456186d8"} Dec 08 00:32:18 crc kubenswrapper[4745]: I1208 00:32:18.013439 4745 generic.go:334] "Generic (PLEG): container finished" podID="54fc67bf-1fad-458f-b734-50d52d8c6e36" containerID="4673244a84eaf0a7eebe0d2ec4d21ff1689b5aa50e4fdb9536bab9c74b414a44" exitCode=0 Dec 08 00:32:18 crc kubenswrapper[4745]: I1208 00:32:18.013475 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"54fc67bf-1fad-458f-b734-50d52d8c6e36","Type":"ContainerDied","Data":"4673244a84eaf0a7eebe0d2ec4d21ff1689b5aa50e4fdb9536bab9c74b414a44"} Dec 08 00:32:19 crc kubenswrapper[4745]: I1208 00:32:19.022095 4745 generic.go:334] "Generic (PLEG): container finished" podID="54fc67bf-1fad-458f-b734-50d52d8c6e36" containerID="7f2d6f1220e591d6ca1f1180ef99d2bc68c4c2a9619c892a2ec1eaee6c833afd" exitCode=0 Dec 08 00:32:19 crc kubenswrapper[4745]: I1208 00:32:19.022338 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"54fc67bf-1fad-458f-b734-50d52d8c6e36","Type":"ContainerDied","Data":"7f2d6f1220e591d6ca1f1180ef99d2bc68c4c2a9619c892a2ec1eaee6c833afd"} Dec 08 00:32:19 crc kubenswrapper[4745]: I1208 00:32:19.092493 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bundle-2-build_54fc67bf-1fad-458f-b734-50d52d8c6e36/manage-dockerfile/0.log" Dec 08 00:32:20 crc kubenswrapper[4745]: I1208 00:32:20.036060 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"54fc67bf-1fad-458f-b734-50d52d8c6e36","Type":"ContainerStarted","Data":"d43d89be55dde7b37112e99e9c3f7a8322a8dcbf0c846c1282b612c8f8319037"} Dec 08 00:32:20 crc kubenswrapper[4745]: I1208 00:32:20.088596 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-bundle-2-build" podStartSLOduration=5.088561242 podStartE2EDuration="5.088561242s" podCreationTimestamp="2025-12-08 00:32:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:32:20.07826132 +0000 UTC m=+1495.507467660" watchObservedRunningTime="2025-12-08 00:32:20.088561242 +0000 UTC m=+1495.517767582" Dec 08 00:32:23 crc kubenswrapper[4745]: I1208 00:32:23.066044 4745 generic.go:334] "Generic (PLEG): container finished" podID="54fc67bf-1fad-458f-b734-50d52d8c6e36" containerID="d43d89be55dde7b37112e99e9c3f7a8322a8dcbf0c846c1282b612c8f8319037" exitCode=0 Dec 08 00:32:23 crc 
kubenswrapper[4745]: I1208 00:32:23.066194 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"54fc67bf-1fad-458f-b734-50d52d8c6e36","Type":"ContainerDied","Data":"d43d89be55dde7b37112e99e9c3f7a8322a8dcbf0c846c1282b612c8f8319037"} Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.279496 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.336968 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-ca-bundles\") pod \"54fc67bf-1fad-458f-b734-50d52d8c6e36\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.337013 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/54fc67bf-1fad-458f-b734-50d52d8c6e36-buildcachedir\") pod \"54fc67bf-1fad-458f-b734-50d52d8c6e36\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.337050 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/54fc67bf-1fad-458f-b734-50d52d8c6e36-node-pullsecrets\") pod \"54fc67bf-1fad-458f-b734-50d52d8c6e36\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.337112 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-blob-cache\") pod \"54fc67bf-1fad-458f-b734-50d52d8c6e36\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.337152 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-container-storage-root\") pod \"54fc67bf-1fad-458f-b734-50d52d8c6e36\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.337158 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/54fc67bf-1fad-458f-b734-50d52d8c6e36-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "54fc67bf-1fad-458f-b734-50d52d8c6e36" (UID: "54fc67bf-1fad-458f-b734-50d52d8c6e36"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.337192 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/54fc67bf-1fad-458f-b734-50d52d8c6e36-builder-dockercfg-vzj4m-pull\") pod \"54fc67bf-1fad-458f-b734-50d52d8c6e36\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.337213 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-proxy-ca-bundles\") pod \"54fc67bf-1fad-458f-b734-50d52d8c6e36\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.337230 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7t6xc\" (UniqueName: \"kubernetes.io/projected/54fc67bf-1fad-458f-b734-50d52d8c6e36-kube-api-access-7t6xc\") pod \"54fc67bf-1fad-458f-b734-50d52d8c6e36\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.337253 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-container-storage-run\") pod \"54fc67bf-1fad-458f-b734-50d52d8c6e36\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.337303 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-buildworkdir\") pod \"54fc67bf-1fad-458f-b734-50d52d8c6e36\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.337341 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/54fc67bf-1fad-458f-b734-50d52d8c6e36-builder-dockercfg-vzj4m-push\") pod \"54fc67bf-1fad-458f-b734-50d52d8c6e36\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.337374 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-system-configs\") pod \"54fc67bf-1fad-458f-b734-50d52d8c6e36\" (UID: \"54fc67bf-1fad-458f-b734-50d52d8c6e36\") " Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.337588 4745 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/54fc67bf-1fad-458f-b734-50d52d8c6e36-buildcachedir\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.337769 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "54fc67bf-1fad-458f-b734-50d52d8c6e36" (UID: "54fc67bf-1fad-458f-b734-50d52d8c6e36"). InnerVolumeSpecName "build-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.337828 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/54fc67bf-1fad-458f-b734-50d52d8c6e36-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "54fc67bf-1fad-458f-b734-50d52d8c6e36" (UID: "54fc67bf-1fad-458f-b734-50d52d8c6e36"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.337959 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "54fc67bf-1fad-458f-b734-50d52d8c6e36" (UID: "54fc67bf-1fad-458f-b734-50d52d8c6e36"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.338730 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "54fc67bf-1fad-458f-b734-50d52d8c6e36" (UID: "54fc67bf-1fad-458f-b734-50d52d8c6e36"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.338819 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "54fc67bf-1fad-458f-b734-50d52d8c6e36" (UID: "54fc67bf-1fad-458f-b734-50d52d8c6e36"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.339176 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "54fc67bf-1fad-458f-b734-50d52d8c6e36" (UID: "54fc67bf-1fad-458f-b734-50d52d8c6e36"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.339606 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "54fc67bf-1fad-458f-b734-50d52d8c6e36" (UID: "54fc67bf-1fad-458f-b734-50d52d8c6e36"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.342774 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54fc67bf-1fad-458f-b734-50d52d8c6e36-builder-dockercfg-vzj4m-pull" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-pull") pod "54fc67bf-1fad-458f-b734-50d52d8c6e36" (UID: "54fc67bf-1fad-458f-b734-50d52d8c6e36"). InnerVolumeSpecName "builder-dockercfg-vzj4m-pull". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.343213 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54fc67bf-1fad-458f-b734-50d52d8c6e36-builder-dockercfg-vzj4m-push" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-push") pod "54fc67bf-1fad-458f-b734-50d52d8c6e36" (UID: "54fc67bf-1fad-458f-b734-50d52d8c6e36"). InnerVolumeSpecName "builder-dockercfg-vzj4m-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.343505 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54fc67bf-1fad-458f-b734-50d52d8c6e36-kube-api-access-7t6xc" (OuterVolumeSpecName: "kube-api-access-7t6xc") pod "54fc67bf-1fad-458f-b734-50d52d8c6e36" (UID: "54fc67bf-1fad-458f-b734-50d52d8c6e36"). InnerVolumeSpecName "kube-api-access-7t6xc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.344364 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "54fc67bf-1fad-458f-b734-50d52d8c6e36" (UID: "54fc67bf-1fad-458f-b734-50d52d8c6e36"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.439277 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-container-storage-root\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.439311 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/54fc67bf-1fad-458f-b734-50d52d8c6e36-builder-dockercfg-vzj4m-pull\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.439322 4745 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.439331 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7t6xc\" (UniqueName: \"kubernetes.io/projected/54fc67bf-1fad-458f-b734-50d52d8c6e36-kube-api-access-7t6xc\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.439339 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-container-storage-run\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.439349 4745 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-buildworkdir\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.439358 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/54fc67bf-1fad-458f-b734-50d52d8c6e36-builder-dockercfg-vzj4m-push\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.439418 4745 reconciler_common.go:293] "Volume detached 
for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-system-configs\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.439432 4745 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.439440 4745 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/54fc67bf-1fad-458f-b734-50d52d8c6e36-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:24 crc kubenswrapper[4745]: I1208 00:32:24.439448 4745 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/54fc67bf-1fad-458f-b734-50d52d8c6e36-build-blob-cache\") on node \"crc\" DevicePath \"\"" Dec 08 00:32:25 crc kubenswrapper[4745]: I1208 00:32:25.079531 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"54fc67bf-1fad-458f-b734-50d52d8c6e36","Type":"ContainerDied","Data":"04e9e484360c898240a1b42a6a4a15112ee4b80fa73b81820e413002456186d8"} Dec 08 00:32:25 crc kubenswrapper[4745]: I1208 00:32:25.079598 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04e9e484360c898240a1b42a6a4a15112ee4b80fa73b81820e413002456186d8" Dec 08 00:32:25 crc kubenswrapper[4745]: I1208 00:32:25.079720 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-2-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.167527 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Dec 08 00:32:43 crc kubenswrapper[4745]: E1208 00:32:43.168401 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54fc67bf-1fad-458f-b734-50d52d8c6e36" containerName="git-clone" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.168421 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="54fc67bf-1fad-458f-b734-50d52d8c6e36" containerName="git-clone" Dec 08 00:32:43 crc kubenswrapper[4745]: E1208 00:32:43.168442 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54fc67bf-1fad-458f-b734-50d52d8c6e36" containerName="docker-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.168455 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="54fc67bf-1fad-458f-b734-50d52d8c6e36" containerName="docker-build" Dec 08 00:32:43 crc kubenswrapper[4745]: E1208 00:32:43.168480 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54fc67bf-1fad-458f-b734-50d52d8c6e36" containerName="manage-dockerfile" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.168494 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="54fc67bf-1fad-458f-b734-50d52d8c6e36" containerName="manage-dockerfile" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.168691 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="54fc67bf-1fad-458f-b734-50d52d8c6e36" containerName="docker-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.170056 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.173012 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"service-telemetry-framework-index-dockercfg" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.178211 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-ca" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.178291 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-sys-config" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.178421 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-global-ca" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.178615 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-vzj4m" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.189254 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.214593 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.214636 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.214657 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.214677 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.214729 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/23c63530-67b0-4531-be2b-a2b837391f85-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " 
pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.214751 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.214780 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.214795 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-builder-dockercfg-vzj4m-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.214814 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64cdc\" (UniqueName: \"kubernetes.io/projected/23c63530-67b0-4531-be2b-a2b837391f85-kube-api-access-64cdc\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.214830 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.214883 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.214908 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-builder-dockercfg-vzj4m-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.214971 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/23c63530-67b0-4531-be2b-a2b837391f85-buildcachedir\") pod 
\"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.315840 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.316174 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-builder-dockercfg-vzj4m-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.316206 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/23c63530-67b0-4531-be2b-a2b837391f85-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.316244 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.316270 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.316291 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.316332 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.316370 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/23c63530-67b0-4531-be2b-a2b837391f85-node-pullsecrets\") pod 
\"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.316400 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.316412 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/23c63530-67b0-4531-be2b-a2b837391f85-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.316582 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/23c63530-67b0-4531-be2b-a2b837391f85-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.316438 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.316700 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-builder-dockercfg-vzj4m-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.316759 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64cdc\" (UniqueName: \"kubernetes.io/projected/23c63530-67b0-4531-be2b-a2b837391f85-kube-api-access-64cdc\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.316795 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.318218 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " 
pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.318335 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.318519 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.318636 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.319045 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.319177 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.319584 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.323419 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-builder-dockercfg-vzj4m-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.324097 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " 
pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.324109 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-builder-dockercfg-vzj4m-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.351332 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64cdc\" (UniqueName: \"kubernetes.io/projected/23c63530-67b0-4531-be2b-a2b837391f85-kube-api-access-64cdc\") pod \"service-telemetry-framework-index-1-build\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.496073 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:32:43 crc kubenswrapper[4745]: I1208 00:32:43.777431 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Dec 08 00:32:44 crc kubenswrapper[4745]: I1208 00:32:44.232230 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"23c63530-67b0-4531-be2b-a2b837391f85","Type":"ContainerStarted","Data":"9c27b3d8c304745395ad6c2bebd19ad54ea70f8bafd4530aaafd142afe768b86"} Dec 08 00:32:44 crc kubenswrapper[4745]: I1208 00:32:44.232282 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"23c63530-67b0-4531-be2b-a2b837391f85","Type":"ContainerStarted","Data":"b8a7e981bf1d77b3760d41439dcadccb52fe8b39d742950f9b3431828bc7622b"} Dec 08 00:32:45 crc kubenswrapper[4745]: I1208 00:32:45.240539 4745 generic.go:334] "Generic (PLEG): container finished" podID="23c63530-67b0-4531-be2b-a2b837391f85" containerID="9c27b3d8c304745395ad6c2bebd19ad54ea70f8bafd4530aaafd142afe768b86" exitCode=0 Dec 08 00:32:45 crc kubenswrapper[4745]: I1208 00:32:45.240606 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"23c63530-67b0-4531-be2b-a2b837391f85","Type":"ContainerDied","Data":"9c27b3d8c304745395ad6c2bebd19ad54ea70f8bafd4530aaafd142afe768b86"} Dec 08 00:32:46 crc kubenswrapper[4745]: I1208 00:32:46.248167 4745 generic.go:334] "Generic (PLEG): container finished" podID="23c63530-67b0-4531-be2b-a2b837391f85" containerID="cf295b7f95720b216caef5988e19bf646b159fc2bb8845494025559eae57a797" exitCode=0 Dec 08 00:32:46 crc kubenswrapper[4745]: I1208 00:32:46.248277 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"23c63530-67b0-4531-be2b-a2b837391f85","Type":"ContainerDied","Data":"cf295b7f95720b216caef5988e19bf646b159fc2bb8845494025559eae57a797"} Dec 08 00:32:46 crc kubenswrapper[4745]: I1208 00:32:46.281549 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-framework-index-1-build_23c63530-67b0-4531-be2b-a2b837391f85/manage-dockerfile/0.log" Dec 08 00:32:47 crc kubenswrapper[4745]: I1208 00:32:47.261220 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"23c63530-67b0-4531-be2b-a2b837391f85","Type":"ContainerStarted","Data":"3151049dbe60df7ab09102e1a42db51771d4a237301ff125dc9a5767dfef0297"} Dec 08 00:32:47 crc kubenswrapper[4745]: I1208 00:32:47.307502 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-framework-index-1-build" podStartSLOduration=4.307473461 podStartE2EDuration="4.307473461s" podCreationTimestamp="2025-12-08 00:32:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:32:47.299360167 +0000 UTC m=+1522.728566507" watchObservedRunningTime="2025-12-08 00:32:47.307473461 +0000 UTC m=+1522.736679791" Dec 08 00:32:52 crc kubenswrapper[4745]: I1208 00:32:52.460463 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:32:52 crc kubenswrapper[4745]: I1208 00:32:52.460664 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:33:22 crc kubenswrapper[4745]: I1208 00:33:22.460507 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:33:22 crc kubenswrapper[4745]: I1208 00:33:22.461048 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:33:22 crc kubenswrapper[4745]: I1208 00:33:22.519604 4745 generic.go:334] "Generic (PLEG): container finished" podID="23c63530-67b0-4531-be2b-a2b837391f85" containerID="3151049dbe60df7ab09102e1a42db51771d4a237301ff125dc9a5767dfef0297" exitCode=0 Dec 08 00:33:22 crc kubenswrapper[4745]: I1208 00:33:22.519814 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"23c63530-67b0-4531-be2b-a2b837391f85","Type":"ContainerDied","Data":"3151049dbe60df7ab09102e1a42db51771d4a237301ff125dc9a5767dfef0297"} Dec 08 00:33:23 crc kubenswrapper[4745]: I1208 00:33:23.930921 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.025489 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/23c63530-67b0-4531-be2b-a2b837391f85-buildcachedir\") pod \"23c63530-67b0-4531-be2b-a2b837391f85\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.025565 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"23c63530-67b0-4531-be2b-a2b837391f85\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.025591 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/23c63530-67b0-4531-be2b-a2b837391f85-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "23c63530-67b0-4531-be2b-a2b837391f85" (UID: "23c63530-67b0-4531-be2b-a2b837391f85"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.025611 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-proxy-ca-bundles\") pod \"23c63530-67b0-4531-be2b-a2b837391f85\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.025708 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-ca-bundles\") pod \"23c63530-67b0-4531-be2b-a2b837391f85\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.025762 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-container-storage-run\") pod \"23c63530-67b0-4531-be2b-a2b837391f85\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.025817 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-system-configs\") pod \"23c63530-67b0-4531-be2b-a2b837391f85\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.025844 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-container-storage-root\") pod \"23c63530-67b0-4531-be2b-a2b837391f85\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.025865 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-buildworkdir\") pod \"23c63530-67b0-4531-be2b-a2b837391f85\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " Dec 08 00:33:24 crc kubenswrapper[4745]: 
I1208 00:33:24.025906 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-builder-dockercfg-vzj4m-push\") pod \"23c63530-67b0-4531-be2b-a2b837391f85\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.025947 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-build-blob-cache\") pod \"23c63530-67b0-4531-be2b-a2b837391f85\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.025977 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64cdc\" (UniqueName: \"kubernetes.io/projected/23c63530-67b0-4531-be2b-a2b837391f85-kube-api-access-64cdc\") pod \"23c63530-67b0-4531-be2b-a2b837391f85\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.026038 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/23c63530-67b0-4531-be2b-a2b837391f85-node-pullsecrets\") pod \"23c63530-67b0-4531-be2b-a2b837391f85\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.026093 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-builder-dockercfg-vzj4m-pull\") pod \"23c63530-67b0-4531-be2b-a2b837391f85\" (UID: \"23c63530-67b0-4531-be2b-a2b837391f85\") " Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.026642 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "23c63530-67b0-4531-be2b-a2b837391f85" (UID: "23c63530-67b0-4531-be2b-a2b837391f85"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.026421 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/23c63530-67b0-4531-be2b-a2b837391f85-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "23c63530-67b0-4531-be2b-a2b837391f85" (UID: "23c63530-67b0-4531-be2b-a2b837391f85"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.027328 4745 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-buildworkdir\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.027346 4745 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/23c63530-67b0-4531-be2b-a2b837391f85-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.027360 4745 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/23c63530-67b0-4531-be2b-a2b837391f85-buildcachedir\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.027759 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "23c63530-67b0-4531-be2b-a2b837391f85" (UID: "23c63530-67b0-4531-be2b-a2b837391f85"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.027777 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "23c63530-67b0-4531-be2b-a2b837391f85" (UID: "23c63530-67b0-4531-be2b-a2b837391f85"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.028055 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "23c63530-67b0-4531-be2b-a2b837391f85" (UID: "23c63530-67b0-4531-be2b-a2b837391f85"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.028183 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "23c63530-67b0-4531-be2b-a2b837391f85" (UID: "23c63530-67b0-4531-be2b-a2b837391f85"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.031894 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-service-telemetry-framework-index-dockercfg-user-build-volume" (OuterVolumeSpecName: "service-telemetry-framework-index-dockercfg-user-build-volume") pod "23c63530-67b0-4531-be2b-a2b837391f85" (UID: "23c63530-67b0-4531-be2b-a2b837391f85"). InnerVolumeSpecName "service-telemetry-framework-index-dockercfg-user-build-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.033171 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-builder-dockercfg-vzj4m-push" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-push") pod "23c63530-67b0-4531-be2b-a2b837391f85" (UID: "23c63530-67b0-4531-be2b-a2b837391f85"). InnerVolumeSpecName "builder-dockercfg-vzj4m-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.034081 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-builder-dockercfg-vzj4m-pull" (OuterVolumeSpecName: "builder-dockercfg-vzj4m-pull") pod "23c63530-67b0-4531-be2b-a2b837391f85" (UID: "23c63530-67b0-4531-be2b-a2b837391f85"). InnerVolumeSpecName "builder-dockercfg-vzj4m-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.041148 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23c63530-67b0-4531-be2b-a2b837391f85-kube-api-access-64cdc" (OuterVolumeSpecName: "kube-api-access-64cdc") pod "23c63530-67b0-4531-be2b-a2b837391f85" (UID: "23c63530-67b0-4531-be2b-a2b837391f85"). InnerVolumeSpecName "kube-api-access-64cdc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.129030 4745 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-system-configs\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.129076 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-push\" (UniqueName: \"kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-builder-dockercfg-vzj4m-push\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.129094 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64cdc\" (UniqueName: \"kubernetes.io/projected/23c63530-67b0-4531-be2b-a2b837391f85-kube-api-access-64cdc\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.129108 4745 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-vzj4m-pull\" (UniqueName: \"kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-builder-dockercfg-vzj4m-pull\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.129126 4745 reconciler_common.go:293] "Volume detached for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/23c63530-67b0-4531-be2b-a2b837391f85-service-telemetry-framework-index-dockercfg-user-build-volume\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.129144 4745 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.129159 4745 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/23c63530-67b0-4531-be2b-a2b837391f85-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:24 crc 
kubenswrapper[4745]: I1208 00:33:24.129174 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-container-storage-run\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.239781 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "23c63530-67b0-4531-be2b-a2b837391f85" (UID: "23c63530-67b0-4531-be2b-a2b837391f85"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.332658 4745 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-build-blob-cache\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.543211 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"23c63530-67b0-4531-be2b-a2b837391f85","Type":"ContainerDied","Data":"b8a7e981bf1d77b3760d41439dcadccb52fe8b39d742950f9b3431828bc7622b"} Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.543329 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8a7e981bf1d77b3760d41439dcadccb52fe8b39d742950f9b3431828bc7622b" Dec 08 00:33:24 crc kubenswrapper[4745]: I1208 00:33:24.543476 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Dec 08 00:33:25 crc kubenswrapper[4745]: I1208 00:33:25.917506 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "23c63530-67b0-4531-be2b-a2b837391f85" (UID: "23c63530-67b0-4531-be2b-a2b837391f85"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:33:25 crc kubenswrapper[4745]: I1208 00:33:25.970738 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-zd2r6"] Dec 08 00:33:25 crc kubenswrapper[4745]: E1208 00:33:25.971013 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23c63530-67b0-4531-be2b-a2b837391f85" containerName="docker-build" Dec 08 00:33:25 crc kubenswrapper[4745]: I1208 00:33:25.971028 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="23c63530-67b0-4531-be2b-a2b837391f85" containerName="docker-build" Dec 08 00:33:25 crc kubenswrapper[4745]: E1208 00:33:25.971045 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23c63530-67b0-4531-be2b-a2b837391f85" containerName="git-clone" Dec 08 00:33:25 crc kubenswrapper[4745]: I1208 00:33:25.971053 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="23c63530-67b0-4531-be2b-a2b837391f85" containerName="git-clone" Dec 08 00:33:25 crc kubenswrapper[4745]: E1208 00:33:25.971063 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23c63530-67b0-4531-be2b-a2b837391f85" containerName="manage-dockerfile" Dec 08 00:33:25 crc kubenswrapper[4745]: I1208 00:33:25.971072 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="23c63530-67b0-4531-be2b-a2b837391f85" containerName="manage-dockerfile" Dec 08 00:33:25 crc kubenswrapper[4745]: I1208 00:33:25.971202 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="23c63530-67b0-4531-be2b-a2b837391f85" containerName="docker-build" Dec 08 00:33:25 crc kubenswrapper[4745]: I1208 00:33:25.971684 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-zd2r6" Dec 08 00:33:25 crc kubenswrapper[4745]: I1208 00:33:25.974598 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"infrawatch-operators-dockercfg-b68v7" Dec 08 00:33:25 crc kubenswrapper[4745]: I1208 00:33:25.978091 4745 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/23c63530-67b0-4531-be2b-a2b837391f85-container-storage-root\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:25 crc kubenswrapper[4745]: I1208 00:33:25.985555 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-zd2r6"] Dec 08 00:33:26 crc kubenswrapper[4745]: I1208 00:33:26.079977 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wc2f\" (UniqueName: \"kubernetes.io/projected/81325c1d-7483-45d1-82ec-fedcb39a03f7-kube-api-access-9wc2f\") pod \"infrawatch-operators-zd2r6\" (UID: \"81325c1d-7483-45d1-82ec-fedcb39a03f7\") " pod="service-telemetry/infrawatch-operators-zd2r6" Dec 08 00:33:26 crc kubenswrapper[4745]: I1208 00:33:26.181399 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wc2f\" (UniqueName: \"kubernetes.io/projected/81325c1d-7483-45d1-82ec-fedcb39a03f7-kube-api-access-9wc2f\") pod \"infrawatch-operators-zd2r6\" (UID: \"81325c1d-7483-45d1-82ec-fedcb39a03f7\") " pod="service-telemetry/infrawatch-operators-zd2r6" Dec 08 00:33:26 crc kubenswrapper[4745]: I1208 00:33:26.214850 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wc2f\" (UniqueName: \"kubernetes.io/projected/81325c1d-7483-45d1-82ec-fedcb39a03f7-kube-api-access-9wc2f\") pod 
\"infrawatch-operators-zd2r6\" (UID: \"81325c1d-7483-45d1-82ec-fedcb39a03f7\") " pod="service-telemetry/infrawatch-operators-zd2r6" Dec 08 00:33:26 crc kubenswrapper[4745]: I1208 00:33:26.340066 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-zd2r6" Dec 08 00:33:26 crc kubenswrapper[4745]: W1208 00:33:26.842149 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81325c1d_7483_45d1_82ec_fedcb39a03f7.slice/crio-749f845be3839fb5661f103899ae7e909dfe7e7de780f66eb9a920b76c99730e WatchSource:0}: Error finding container 749f845be3839fb5661f103899ae7e909dfe7e7de780f66eb9a920b76c99730e: Status 404 returned error can't find the container with id 749f845be3839fb5661f103899ae7e909dfe7e7de780f66eb9a920b76c99730e Dec 08 00:33:26 crc kubenswrapper[4745]: I1208 00:33:26.843757 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-zd2r6"] Dec 08 00:33:27 crc kubenswrapper[4745]: I1208 00:33:27.566556 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-zd2r6" event={"ID":"81325c1d-7483-45d1-82ec-fedcb39a03f7","Type":"ContainerStarted","Data":"749f845be3839fb5661f103899ae7e909dfe7e7de780f66eb9a920b76c99730e"} Dec 08 00:33:30 crc kubenswrapper[4745]: I1208 00:33:30.360562 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-zd2r6"] Dec 08 00:33:31 crc kubenswrapper[4745]: I1208 00:33:31.164357 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-7bqq4"] Dec 08 00:33:31 crc kubenswrapper[4745]: I1208 00:33:31.168618 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-7bqq4" Dec 08 00:33:31 crc kubenswrapper[4745]: I1208 00:33:31.176755 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-7bqq4"] Dec 08 00:33:31 crc kubenswrapper[4745]: I1208 00:33:31.269474 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2csv\" (UniqueName: \"kubernetes.io/projected/a9623372-bf44-47c4-aed3-f3386d39f4c2-kube-api-access-w2csv\") pod \"infrawatch-operators-7bqq4\" (UID: \"a9623372-bf44-47c4-aed3-f3386d39f4c2\") " pod="service-telemetry/infrawatch-operators-7bqq4" Dec 08 00:33:31 crc kubenswrapper[4745]: I1208 00:33:31.372380 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2csv\" (UniqueName: \"kubernetes.io/projected/a9623372-bf44-47c4-aed3-f3386d39f4c2-kube-api-access-w2csv\") pod \"infrawatch-operators-7bqq4\" (UID: \"a9623372-bf44-47c4-aed3-f3386d39f4c2\") " pod="service-telemetry/infrawatch-operators-7bqq4" Dec 08 00:33:31 crc kubenswrapper[4745]: I1208 00:33:31.390969 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2csv\" (UniqueName: \"kubernetes.io/projected/a9623372-bf44-47c4-aed3-f3386d39f4c2-kube-api-access-w2csv\") pod \"infrawatch-operators-7bqq4\" (UID: \"a9623372-bf44-47c4-aed3-f3386d39f4c2\") " pod="service-telemetry/infrawatch-operators-7bqq4" Dec 08 00:33:31 crc kubenswrapper[4745]: I1208 00:33:31.528307 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-7bqq4" Dec 08 00:33:36 crc kubenswrapper[4745]: I1208 00:33:36.840534 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-7bqq4"] Dec 08 00:33:38 crc kubenswrapper[4745]: W1208 00:33:38.691813 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9623372_bf44_47c4_aed3_f3386d39f4c2.slice/crio-4eb0db7aec854628d26c68fbe98d9f13824e510db47f12b08a463ccfa62b4a1e WatchSource:0}: Error finding container 4eb0db7aec854628d26c68fbe98d9f13824e510db47f12b08a463ccfa62b4a1e: Status 404 returned error can't find the container with id 4eb0db7aec854628d26c68fbe98d9f13824e510db47f12b08a463ccfa62b4a1e Dec 08 00:33:39 crc kubenswrapper[4745]: I1208 00:33:39.658030 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-zd2r6" event={"ID":"81325c1d-7483-45d1-82ec-fedcb39a03f7","Type":"ContainerStarted","Data":"c54f4a3f1f6efbc9e6386bff245f5d0fe968e9890a1d2fa11f5e5aac35a659e4"} Dec 08 00:33:39 crc kubenswrapper[4745]: I1208 00:33:39.658208 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/infrawatch-operators-zd2r6" podUID="81325c1d-7483-45d1-82ec-fedcb39a03f7" containerName="registry-server" containerID="cri-o://c54f4a3f1f6efbc9e6386bff245f5d0fe968e9890a1d2fa11f5e5aac35a659e4" gracePeriod=2 Dec 08 00:33:39 crc kubenswrapper[4745]: I1208 00:33:39.673974 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-7bqq4" event={"ID":"a9623372-bf44-47c4-aed3-f3386d39f4c2","Type":"ContainerStarted","Data":"2fb1c604416abc93e2ad3f32e8eff4949596d48bd78eb09ab6e422029553c636"} Dec 08 00:33:39 crc kubenswrapper[4745]: I1208 00:33:39.674047 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-7bqq4" event={"ID":"a9623372-bf44-47c4-aed3-f3386d39f4c2","Type":"ContainerStarted","Data":"4eb0db7aec854628d26c68fbe98d9f13824e510db47f12b08a463ccfa62b4a1e"} Dec 08 00:33:39 crc kubenswrapper[4745]: I1208 00:33:39.694293 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-zd2r6" podStartSLOduration=2.284571036 podStartE2EDuration="14.694266814s" podCreationTimestamp="2025-12-08 00:33:25 +0000 UTC" firstStartedPulling="2025-12-08 00:33:26.844779303 +0000 UTC m=+1562.273985623" lastFinishedPulling="2025-12-08 00:33:39.254475091 +0000 UTC m=+1574.683681401" observedRunningTime="2025-12-08 00:33:39.686028967 +0000 UTC m=+1575.115235297" watchObservedRunningTime="2025-12-08 00:33:39.694266814 +0000 UTC m=+1575.123473144" Dec 08 00:33:39 crc kubenswrapper[4745]: I1208 00:33:39.722761 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-7bqq4" podStartSLOduration=8.25216688 podStartE2EDuration="8.722730167s" podCreationTimestamp="2025-12-08 00:33:31 +0000 UTC" firstStartedPulling="2025-12-08 00:33:38.693759751 +0000 UTC m=+1574.122966051" lastFinishedPulling="2025-12-08 00:33:39.164323048 +0000 UTC m=+1574.593529338" observedRunningTime="2025-12-08 00:33:39.710536754 +0000 UTC m=+1575.139743104" watchObservedRunningTime="2025-12-08 00:33:39.722730167 +0000 UTC m=+1575.151936497" Dec 08 00:33:40 crc kubenswrapper[4745]: I1208 00:33:40.127629 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-zd2r6" Dec 08 00:33:40 crc kubenswrapper[4745]: I1208 00:33:40.196873 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wc2f\" (UniqueName: \"kubernetes.io/projected/81325c1d-7483-45d1-82ec-fedcb39a03f7-kube-api-access-9wc2f\") pod \"81325c1d-7483-45d1-82ec-fedcb39a03f7\" (UID: \"81325c1d-7483-45d1-82ec-fedcb39a03f7\") " Dec 08 00:33:40 crc kubenswrapper[4745]: I1208 00:33:40.202136 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81325c1d-7483-45d1-82ec-fedcb39a03f7-kube-api-access-9wc2f" (OuterVolumeSpecName: "kube-api-access-9wc2f") pod "81325c1d-7483-45d1-82ec-fedcb39a03f7" (UID: "81325c1d-7483-45d1-82ec-fedcb39a03f7"). InnerVolumeSpecName "kube-api-access-9wc2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:33:40 crc kubenswrapper[4745]: I1208 00:33:40.299039 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wc2f\" (UniqueName: \"kubernetes.io/projected/81325c1d-7483-45d1-82ec-fedcb39a03f7-kube-api-access-9wc2f\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:40 crc kubenswrapper[4745]: I1208 00:33:40.688652 4745 generic.go:334] "Generic (PLEG): container finished" podID="81325c1d-7483-45d1-82ec-fedcb39a03f7" containerID="c54f4a3f1f6efbc9e6386bff245f5d0fe968e9890a1d2fa11f5e5aac35a659e4" exitCode=0 Dec 08 00:33:40 crc kubenswrapper[4745]: I1208 00:33:40.688746 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-zd2r6" Dec 08 00:33:40 crc kubenswrapper[4745]: I1208 00:33:40.688749 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-zd2r6" event={"ID":"81325c1d-7483-45d1-82ec-fedcb39a03f7","Type":"ContainerDied","Data":"c54f4a3f1f6efbc9e6386bff245f5d0fe968e9890a1d2fa11f5e5aac35a659e4"} Dec 08 00:33:40 crc kubenswrapper[4745]: I1208 00:33:40.689268 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-zd2r6" event={"ID":"81325c1d-7483-45d1-82ec-fedcb39a03f7","Type":"ContainerDied","Data":"749f845be3839fb5661f103899ae7e909dfe7e7de780f66eb9a920b76c99730e"} Dec 08 00:33:40 crc kubenswrapper[4745]: I1208 00:33:40.689317 4745 scope.go:117] "RemoveContainer" containerID="c54f4a3f1f6efbc9e6386bff245f5d0fe968e9890a1d2fa11f5e5aac35a659e4" Dec 08 00:33:40 crc kubenswrapper[4745]: I1208 00:33:40.725331 4745 scope.go:117] "RemoveContainer" containerID="c54f4a3f1f6efbc9e6386bff245f5d0fe968e9890a1d2fa11f5e5aac35a659e4" Dec 08 00:33:40 crc kubenswrapper[4745]: E1208 00:33:40.726657 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c54f4a3f1f6efbc9e6386bff245f5d0fe968e9890a1d2fa11f5e5aac35a659e4\": container with ID starting with c54f4a3f1f6efbc9e6386bff245f5d0fe968e9890a1d2fa11f5e5aac35a659e4 not found: ID does not exist" containerID="c54f4a3f1f6efbc9e6386bff245f5d0fe968e9890a1d2fa11f5e5aac35a659e4" Dec 08 00:33:40 crc kubenswrapper[4745]: I1208 00:33:40.726721 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c54f4a3f1f6efbc9e6386bff245f5d0fe968e9890a1d2fa11f5e5aac35a659e4"} err="failed to get container status \"c54f4a3f1f6efbc9e6386bff245f5d0fe968e9890a1d2fa11f5e5aac35a659e4\": rpc error: code = NotFound desc = could not find container 
\"c54f4a3f1f6efbc9e6386bff245f5d0fe968e9890a1d2fa11f5e5aac35a659e4\": container with ID starting with c54f4a3f1f6efbc9e6386bff245f5d0fe968e9890a1d2fa11f5e5aac35a659e4 not found: ID does not exist" Dec 08 00:33:40 crc kubenswrapper[4745]: I1208 00:33:40.747341 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-zd2r6"] Dec 08 00:33:40 crc kubenswrapper[4745]: I1208 00:33:40.759289 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/infrawatch-operators-zd2r6"] Dec 08 00:33:40 crc kubenswrapper[4745]: I1208 00:33:40.889544 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81325c1d-7483-45d1-82ec-fedcb39a03f7" path="/var/lib/kubelet/pods/81325c1d-7483-45d1-82ec-fedcb39a03f7/volumes" Dec 08 00:33:41 crc kubenswrapper[4745]: I1208 00:33:41.529558 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/infrawatch-operators-7bqq4" Dec 08 00:33:41 crc kubenswrapper[4745]: I1208 00:33:41.529643 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/infrawatch-operators-7bqq4" Dec 08 00:33:41 crc kubenswrapper[4745]: I1208 00:33:41.578258 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/infrawatch-operators-7bqq4" Dec 08 00:33:51 crc kubenswrapper[4745]: I1208 00:33:51.578363 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/infrawatch-operators-7bqq4" Dec 08 00:33:52 crc kubenswrapper[4745]: I1208 00:33:52.460353 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:33:52 crc kubenswrapper[4745]: I1208 00:33:52.460456 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:33:52 crc kubenswrapper[4745]: I1208 00:33:52.460524 4745 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:33:52 crc kubenswrapper[4745]: I1208 00:33:52.461417 4745 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0"} pod="openshift-machine-config-operator/machine-config-daemon-6czdv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 00:33:52 crc kubenswrapper[4745]: I1208 00:33:52.461550 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" containerID="cri-o://70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" gracePeriod=600 Dec 08 00:33:52 crc kubenswrapper[4745]: E1208 00:33:52.589868 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:33:52 crc kubenswrapper[4745]: I1208 00:33:52.788838 4745 generic.go:334] "Generic (PLEG): container finished" podID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" exitCode=0 Dec 08 00:33:52 crc kubenswrapper[4745]: I1208 00:33:52.788894 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerDied","Data":"70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0"} Dec 08 00:33:52 crc kubenswrapper[4745]: I1208 00:33:52.788967 4745 scope.go:117] "RemoveContainer" containerID="aa3944eefb5c403e042888407a760b9ae69ac970a839ac450c44f0d8351dbb2c" Dec 08 00:33:52 crc kubenswrapper[4745]: I1208 00:33:52.789602 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:33:52 crc kubenswrapper[4745]: E1208 00:33:52.789958 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:33:53 crc kubenswrapper[4745]: I1208 00:33:53.819657 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9"] Dec 08 00:33:53 crc kubenswrapper[4745]: E1208 00:33:53.820216 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81325c1d-7483-45d1-82ec-fedcb39a03f7" containerName="registry-server" Dec 08 00:33:53 crc kubenswrapper[4745]: I1208 00:33:53.820229 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="81325c1d-7483-45d1-82ec-fedcb39a03f7" containerName="registry-server" Dec 08 00:33:53 crc kubenswrapper[4745]: I1208 00:33:53.820349 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="81325c1d-7483-45d1-82ec-fedcb39a03f7" containerName="registry-server" Dec 08 00:33:53 crc kubenswrapper[4745]: I1208 00:33:53.821473 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" Dec 08 00:33:53 crc kubenswrapper[4745]: I1208 00:33:53.845109 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9"] Dec 08 00:33:53 crc kubenswrapper[4745]: I1208 00:33:53.897567 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7fb76e12-b143-4660-964e-8972e0c007a8-util\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9\" (UID: \"7fb76e12-b143-4660-964e-8972e0c007a8\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" Dec 08 00:33:53 crc kubenswrapper[4745]: I1208 00:33:53.897615 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zc5q\" (UniqueName: \"kubernetes.io/projected/7fb76e12-b143-4660-964e-8972e0c007a8-kube-api-access-2zc5q\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9\" (UID: \"7fb76e12-b143-4660-964e-8972e0c007a8\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" Dec 08 00:33:53 crc kubenswrapper[4745]: I1208 00:33:53.897717 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7fb76e12-b143-4660-964e-8972e0c007a8-bundle\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9\" (UID: \"7fb76e12-b143-4660-964e-8972e0c007a8\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" Dec 08 00:33:53 crc kubenswrapper[4745]: I1208 00:33:53.999195 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7fb76e12-b143-4660-964e-8972e0c007a8-bundle\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9\" (UID: \"7fb76e12-b143-4660-964e-8972e0c007a8\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" Dec 08 00:33:53 crc kubenswrapper[4745]: I1208 00:33:53.999260 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7fb76e12-b143-4660-964e-8972e0c007a8-util\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9\" (UID: \"7fb76e12-b143-4660-964e-8972e0c007a8\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" Dec 08 00:33:53 crc kubenswrapper[4745]: I1208 00:33:53.999295 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zc5q\" (UniqueName: \"kubernetes.io/projected/7fb76e12-b143-4660-964e-8972e0c007a8-kube-api-access-2zc5q\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9\" (UID: \"7fb76e12-b143-4660-964e-8972e0c007a8\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.000027 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7fb76e12-b143-4660-964e-8972e0c007a8-bundle\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9\" (UID: \"7fb76e12-b143-4660-964e-8972e0c007a8\") " 
pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.000031 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7fb76e12-b143-4660-964e-8972e0c007a8-util\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9\" (UID: \"7fb76e12-b143-4660-964e-8972e0c007a8\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.017160 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zc5q\" (UniqueName: \"kubernetes.io/projected/7fb76e12-b143-4660-964e-8972e0c007a8-kube-api-access-2zc5q\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9\" (UID: \"7fb76e12-b143-4660-964e-8972e0c007a8\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.198329 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.627904 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf"] Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.629393 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.639387 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf"] Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.675241 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9"] Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.714627 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pq446\" (UniqueName: \"kubernetes.io/projected/6e65d076-6f7e-48d3-85d6-e969e8484151-kube-api-access-pq446\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf\" (UID: \"6e65d076-6f7e-48d3-85d6-e969e8484151\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.714670 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6e65d076-6f7e-48d3-85d6-e969e8484151-bundle\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf\" (UID: \"6e65d076-6f7e-48d3-85d6-e969e8484151\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.714695 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6e65d076-6f7e-48d3-85d6-e969e8484151-util\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf\" (UID: \"6e65d076-6f7e-48d3-85d6-e969e8484151\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 
00:33:54.809678 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" event={"ID":"7fb76e12-b143-4660-964e-8972e0c007a8","Type":"ContainerStarted","Data":"efeb0f5707f62d69b4f831f139c4822dd03538ce99b06f13140d0df1a7783413"} Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.816526 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pq446\" (UniqueName: \"kubernetes.io/projected/6e65d076-6f7e-48d3-85d6-e969e8484151-kube-api-access-pq446\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf\" (UID: \"6e65d076-6f7e-48d3-85d6-e969e8484151\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.816576 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6e65d076-6f7e-48d3-85d6-e969e8484151-bundle\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf\" (UID: \"6e65d076-6f7e-48d3-85d6-e969e8484151\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.816606 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6e65d076-6f7e-48d3-85d6-e969e8484151-util\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf\" (UID: \"6e65d076-6f7e-48d3-85d6-e969e8484151\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.817860 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6e65d076-6f7e-48d3-85d6-e969e8484151-bundle\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf\" (UID: \"6e65d076-6f7e-48d3-85d6-e969e8484151\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.817880 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6e65d076-6f7e-48d3-85d6-e969e8484151-util\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf\" (UID: \"6e65d076-6f7e-48d3-85d6-e969e8484151\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.834139 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pq446\" (UniqueName: \"kubernetes.io/projected/6e65d076-6f7e-48d3-85d6-e969e8484151-kube-api-access-pq446\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf\" (UID: \"6e65d076-6f7e-48d3-85d6-e969e8484151\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" Dec 08 00:33:54 crc kubenswrapper[4745]: I1208 00:33:54.953147 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" Dec 08 00:33:55 crc kubenswrapper[4745]: I1208 00:33:55.241142 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf"] Dec 08 00:33:55 crc kubenswrapper[4745]: W1208 00:33:55.244072 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e65d076_6f7e_48d3_85d6_e969e8484151.slice/crio-e2a2551768e6725416773c76c9031b1bf12c3f5625568f0a5c680c0fe3ed297f WatchSource:0}: Error finding container e2a2551768e6725416773c76c9031b1bf12c3f5625568f0a5c680c0fe3ed297f: Status 404 returned error can't find the container with id e2a2551768e6725416773c76c9031b1bf12c3f5625568f0a5c680c0fe3ed297f Dec 08 00:33:55 crc kubenswrapper[4745]: I1208 00:33:55.819015 4745 generic.go:334] "Generic (PLEG): container finished" podID="6e65d076-6f7e-48d3-85d6-e969e8484151" containerID="3d4181033e59ffac0379e1e6860b49e58e365cbc1ce94d696a9ecc356b6cc4c2" exitCode=0 Dec 08 00:33:55 crc kubenswrapper[4745]: I1208 00:33:55.819294 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" event={"ID":"6e65d076-6f7e-48d3-85d6-e969e8484151","Type":"ContainerDied","Data":"3d4181033e59ffac0379e1e6860b49e58e365cbc1ce94d696a9ecc356b6cc4c2"} Dec 08 00:33:55 crc kubenswrapper[4745]: I1208 00:33:55.819730 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" event={"ID":"6e65d076-6f7e-48d3-85d6-e969e8484151","Type":"ContainerStarted","Data":"e2a2551768e6725416773c76c9031b1bf12c3f5625568f0a5c680c0fe3ed297f"} Dec 08 00:33:55 crc kubenswrapper[4745]: I1208 00:33:55.823026 4745 generic.go:334] "Generic (PLEG): container finished" podID="7fb76e12-b143-4660-964e-8972e0c007a8" containerID="2bd32e529b3334470a8421c9482ff05fc95542ee18642b71594f4cbaa1cc97c8" exitCode=0 Dec 08 00:33:55 crc kubenswrapper[4745]: I1208 00:33:55.823120 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" event={"ID":"7fb76e12-b143-4660-964e-8972e0c007a8","Type":"ContainerDied","Data":"2bd32e529b3334470a8421c9482ff05fc95542ee18642b71594f4cbaa1cc97c8"} Dec 08 00:33:56 crc kubenswrapper[4745]: I1208 00:33:56.834919 4745 generic.go:334] "Generic (PLEG): container finished" podID="6e65d076-6f7e-48d3-85d6-e969e8484151" containerID="bbc07331d8a39b1ce1544ccea12fc76bdf6d37a8add1808983a3034f41528ce1" exitCode=0 Dec 08 00:33:56 crc kubenswrapper[4745]: I1208 00:33:56.835534 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" event={"ID":"6e65d076-6f7e-48d3-85d6-e969e8484151","Type":"ContainerDied","Data":"bbc07331d8a39b1ce1544ccea12fc76bdf6d37a8add1808983a3034f41528ce1"} Dec 08 00:33:56 crc kubenswrapper[4745]: I1208 00:33:56.840550 4745 generic.go:334] "Generic (PLEG): container finished" podID="7fb76e12-b143-4660-964e-8972e0c007a8" containerID="7a23dad21055dcd26bc85535252ef08629ec04e4223f092ebe9516234b129b27" exitCode=0 Dec 08 00:33:56 crc kubenswrapper[4745]: I1208 00:33:56.840617 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" 
event={"ID":"7fb76e12-b143-4660-964e-8972e0c007a8","Type":"ContainerDied","Data":"7a23dad21055dcd26bc85535252ef08629ec04e4223f092ebe9516234b129b27"} Dec 08 00:33:57 crc kubenswrapper[4745]: I1208 00:33:57.851554 4745 generic.go:334] "Generic (PLEG): container finished" podID="7fb76e12-b143-4660-964e-8972e0c007a8" containerID="22ebe21f88d1c15a65900922d42c7a142fc7290ecae63f48ba3b945690277f84" exitCode=0 Dec 08 00:33:57 crc kubenswrapper[4745]: I1208 00:33:57.851620 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" event={"ID":"7fb76e12-b143-4660-964e-8972e0c007a8","Type":"ContainerDied","Data":"22ebe21f88d1c15a65900922d42c7a142fc7290ecae63f48ba3b945690277f84"} Dec 08 00:33:57 crc kubenswrapper[4745]: I1208 00:33:57.854342 4745 generic.go:334] "Generic (PLEG): container finished" podID="6e65d076-6f7e-48d3-85d6-e969e8484151" containerID="d1a5f81812626473aa1c39d436fb203413514d38a5da15959ef607a05eae422d" exitCode=0 Dec 08 00:33:57 crc kubenswrapper[4745]: I1208 00:33:57.854409 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" event={"ID":"6e65d076-6f7e-48d3-85d6-e969e8484151","Type":"ContainerDied","Data":"d1a5f81812626473aa1c39d436fb203413514d38a5da15959ef607a05eae422d"} Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.175237 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.181673 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.273515 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7fb76e12-b143-4660-964e-8972e0c007a8-util\") pod \"7fb76e12-b143-4660-964e-8972e0c007a8\" (UID: \"7fb76e12-b143-4660-964e-8972e0c007a8\") " Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.273646 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zc5q\" (UniqueName: \"kubernetes.io/projected/7fb76e12-b143-4660-964e-8972e0c007a8-kube-api-access-2zc5q\") pod \"7fb76e12-b143-4660-964e-8972e0c007a8\" (UID: \"7fb76e12-b143-4660-964e-8972e0c007a8\") " Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.273686 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6e65d076-6f7e-48d3-85d6-e969e8484151-bundle\") pod \"6e65d076-6f7e-48d3-85d6-e969e8484151\" (UID: \"6e65d076-6f7e-48d3-85d6-e969e8484151\") " Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.273727 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pq446\" (UniqueName: \"kubernetes.io/projected/6e65d076-6f7e-48d3-85d6-e969e8484151-kube-api-access-pq446\") pod \"6e65d076-6f7e-48d3-85d6-e969e8484151\" (UID: \"6e65d076-6f7e-48d3-85d6-e969e8484151\") " Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.273766 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6e65d076-6f7e-48d3-85d6-e969e8484151-util\") pod \"6e65d076-6f7e-48d3-85d6-e969e8484151\" (UID: 
\"6e65d076-6f7e-48d3-85d6-e969e8484151\") " Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.273801 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7fb76e12-b143-4660-964e-8972e0c007a8-bundle\") pod \"7fb76e12-b143-4660-964e-8972e0c007a8\" (UID: \"7fb76e12-b143-4660-964e-8972e0c007a8\") " Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.274401 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fb76e12-b143-4660-964e-8972e0c007a8-bundle" (OuterVolumeSpecName: "bundle") pod "7fb76e12-b143-4660-964e-8972e0c007a8" (UID: "7fb76e12-b143-4660-964e-8972e0c007a8"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.274489 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e65d076-6f7e-48d3-85d6-e969e8484151-bundle" (OuterVolumeSpecName: "bundle") pod "6e65d076-6f7e-48d3-85d6-e969e8484151" (UID: "6e65d076-6f7e-48d3-85d6-e969e8484151"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.281100 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fb76e12-b143-4660-964e-8972e0c007a8-kube-api-access-2zc5q" (OuterVolumeSpecName: "kube-api-access-2zc5q") pod "7fb76e12-b143-4660-964e-8972e0c007a8" (UID: "7fb76e12-b143-4660-964e-8972e0c007a8"). InnerVolumeSpecName "kube-api-access-2zc5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.284169 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e65d076-6f7e-48d3-85d6-e969e8484151-kube-api-access-pq446" (OuterVolumeSpecName: "kube-api-access-pq446") pod "6e65d076-6f7e-48d3-85d6-e969e8484151" (UID: "6e65d076-6f7e-48d3-85d6-e969e8484151"). InnerVolumeSpecName "kube-api-access-pq446". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.329405 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e65d076-6f7e-48d3-85d6-e969e8484151-util" (OuterVolumeSpecName: "util") pod "6e65d076-6f7e-48d3-85d6-e969e8484151" (UID: "6e65d076-6f7e-48d3-85d6-e969e8484151"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.336977 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fb76e12-b143-4660-964e-8972e0c007a8-util" (OuterVolumeSpecName: "util") pod "7fb76e12-b143-4660-964e-8972e0c007a8" (UID: "7fb76e12-b143-4660-964e-8972e0c007a8"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.375361 4745 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6e65d076-6f7e-48d3-85d6-e969e8484151-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.375399 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pq446\" (UniqueName: \"kubernetes.io/projected/6e65d076-6f7e-48d3-85d6-e969e8484151-kube-api-access-pq446\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.375410 4745 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6e65d076-6f7e-48d3-85d6-e969e8484151-util\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.375419 4745 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7fb76e12-b143-4660-964e-8972e0c007a8-bundle\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.375427 4745 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7fb76e12-b143-4660-964e-8972e0c007a8-util\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.375435 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zc5q\" (UniqueName: \"kubernetes.io/projected/7fb76e12-b143-4660-964e-8972e0c007a8-kube-api-access-2zc5q\") on node \"crc\" DevicePath \"\"" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.869582 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.869585 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65adh7g9" event={"ID":"7fb76e12-b143-4660-964e-8972e0c007a8","Type":"ContainerDied","Data":"efeb0f5707f62d69b4f831f139c4822dd03538ce99b06f13140d0df1a7783413"} Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.869717 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efeb0f5707f62d69b4f831f139c4822dd03538ce99b06f13140d0df1a7783413" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.872665 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" event={"ID":"6e65d076-6f7e-48d3-85d6-e969e8484151","Type":"ContainerDied","Data":"e2a2551768e6725416773c76c9031b1bf12c3f5625568f0a5c680c0fe3ed297f"} Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.872697 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2a2551768e6725416773c76c9031b1bf12c3f5625568f0a5c680c0fe3ed297f" Dec 08 00:33:59 crc kubenswrapper[4745]: I1208 00:33:59.872765 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c09snhlf" Dec 08 00:34:04 crc kubenswrapper[4745]: I1208 00:34:04.949469 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-8667fccdb7-8fsfw"] Dec 08 00:34:04 crc kubenswrapper[4745]: E1208 00:34:04.949837 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fb76e12-b143-4660-964e-8972e0c007a8" containerName="pull" Dec 08 00:34:04 crc kubenswrapper[4745]: I1208 00:34:04.949859 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fb76e12-b143-4660-964e-8972e0c007a8" containerName="pull" Dec 08 00:34:04 crc kubenswrapper[4745]: E1208 00:34:04.949881 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fb76e12-b143-4660-964e-8972e0c007a8" containerName="util" Dec 08 00:34:04 crc kubenswrapper[4745]: I1208 00:34:04.949892 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fb76e12-b143-4660-964e-8972e0c007a8" containerName="util" Dec 08 00:34:04 crc kubenswrapper[4745]: E1208 00:34:04.949916 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fb76e12-b143-4660-964e-8972e0c007a8" containerName="extract" Dec 08 00:34:04 crc kubenswrapper[4745]: I1208 00:34:04.949951 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fb76e12-b143-4660-964e-8972e0c007a8" containerName="extract" Dec 08 00:34:04 crc kubenswrapper[4745]: E1208 00:34:04.949972 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e65d076-6f7e-48d3-85d6-e969e8484151" containerName="pull" Dec 08 00:34:04 crc kubenswrapper[4745]: I1208 00:34:04.949984 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e65d076-6f7e-48d3-85d6-e969e8484151" containerName="pull" Dec 08 00:34:04 crc kubenswrapper[4745]: E1208 00:34:04.949999 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e65d076-6f7e-48d3-85d6-e969e8484151" containerName="extract" Dec 08 00:34:04 crc kubenswrapper[4745]: I1208 00:34:04.950009 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e65d076-6f7e-48d3-85d6-e969e8484151" containerName="extract" Dec 08 00:34:04 crc kubenswrapper[4745]: E1208 00:34:04.950026 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e65d076-6f7e-48d3-85d6-e969e8484151" containerName="util" Dec 08 00:34:04 crc kubenswrapper[4745]: I1208 00:34:04.950039 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e65d076-6f7e-48d3-85d6-e969e8484151" containerName="util" Dec 08 00:34:04 crc kubenswrapper[4745]: I1208 00:34:04.950232 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e65d076-6f7e-48d3-85d6-e969e8484151" containerName="extract" Dec 08 00:34:04 crc kubenswrapper[4745]: I1208 00:34:04.950251 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fb76e12-b143-4660-964e-8972e0c007a8" containerName="extract" Dec 08 00:34:04 crc kubenswrapper[4745]: I1208 00:34:04.950873 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-8667fccdb7-8fsfw" Dec 08 00:34:04 crc kubenswrapper[4745]: I1208 00:34:04.953470 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"service-telemetry-operator-dockercfg-g9hvj" Dec 08 00:34:04 crc kubenswrapper[4745]: I1208 00:34:04.960881 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-8667fccdb7-8fsfw"] Dec 08 00:34:05 crc kubenswrapper[4745]: I1208 00:34:05.020543 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/5d411af2-29cc-431a-a376-9031915ed5c1-runner\") pod \"service-telemetry-operator-8667fccdb7-8fsfw\" (UID: \"5d411af2-29cc-431a-a376-9031915ed5c1\") " pod="service-telemetry/service-telemetry-operator-8667fccdb7-8fsfw" Dec 08 00:34:05 crc kubenswrapper[4745]: I1208 00:34:05.020629 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pz55\" (UniqueName: \"kubernetes.io/projected/5d411af2-29cc-431a-a376-9031915ed5c1-kube-api-access-7pz55\") pod \"service-telemetry-operator-8667fccdb7-8fsfw\" (UID: \"5d411af2-29cc-431a-a376-9031915ed5c1\") " pod="service-telemetry/service-telemetry-operator-8667fccdb7-8fsfw" Dec 08 00:34:05 crc kubenswrapper[4745]: I1208 00:34:05.121694 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pz55\" (UniqueName: \"kubernetes.io/projected/5d411af2-29cc-431a-a376-9031915ed5c1-kube-api-access-7pz55\") pod \"service-telemetry-operator-8667fccdb7-8fsfw\" (UID: \"5d411af2-29cc-431a-a376-9031915ed5c1\") " pod="service-telemetry/service-telemetry-operator-8667fccdb7-8fsfw" Dec 08 00:34:05 crc kubenswrapper[4745]: I1208 00:34:05.122017 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/5d411af2-29cc-431a-a376-9031915ed5c1-runner\") pod \"service-telemetry-operator-8667fccdb7-8fsfw\" (UID: \"5d411af2-29cc-431a-a376-9031915ed5c1\") " pod="service-telemetry/service-telemetry-operator-8667fccdb7-8fsfw" Dec 08 00:34:05 crc kubenswrapper[4745]: I1208 00:34:05.122576 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/5d411af2-29cc-431a-a376-9031915ed5c1-runner\") pod \"service-telemetry-operator-8667fccdb7-8fsfw\" (UID: \"5d411af2-29cc-431a-a376-9031915ed5c1\") " pod="service-telemetry/service-telemetry-operator-8667fccdb7-8fsfw" Dec 08 00:34:05 crc kubenswrapper[4745]: I1208 00:34:05.142036 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pz55\" (UniqueName: \"kubernetes.io/projected/5d411af2-29cc-431a-a376-9031915ed5c1-kube-api-access-7pz55\") pod \"service-telemetry-operator-8667fccdb7-8fsfw\" (UID: \"5d411af2-29cc-431a-a376-9031915ed5c1\") " pod="service-telemetry/service-telemetry-operator-8667fccdb7-8fsfw" Dec 08 00:34:05 crc kubenswrapper[4745]: I1208 00:34:05.281189 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-8667fccdb7-8fsfw" Dec 08 00:34:05 crc kubenswrapper[4745]: I1208 00:34:05.465358 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-8667fccdb7-8fsfw"] Dec 08 00:34:05 crc kubenswrapper[4745]: I1208 00:34:05.882874 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:34:05 crc kubenswrapper[4745]: E1208 00:34:05.883760 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:34:05 crc kubenswrapper[4745]: I1208 00:34:05.921005 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-8667fccdb7-8fsfw" event={"ID":"5d411af2-29cc-431a-a376-9031915ed5c1","Type":"ContainerStarted","Data":"11453f4e6d56bb2b70a6e8055c7bc380aafd995ebcaceb9c1598dcdda27f5862"} Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.764591 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-599b4778bd-f4452"] Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.765877 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-599b4778bd-f4452" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.771385 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-operator-dockercfg-bl8pm" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.774237 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hrqqk"] Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.775645 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.782354 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-599b4778bd-f4452"] Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.846834 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hrqqk"] Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.866097 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb7f815e-9afe-436b-96ed-797f0703645b-catalog-content\") pod \"community-operators-hrqqk\" (UID: \"bb7f815e-9afe-436b-96ed-797f0703645b\") " pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.866149 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdtsx\" (UniqueName: \"kubernetes.io/projected/16b0d4ba-0de0-4171-be83-e536a3199c16-kube-api-access-bdtsx\") pod \"smart-gateway-operator-599b4778bd-f4452\" (UID: \"16b0d4ba-0de0-4171-be83-e536a3199c16\") " pod="service-telemetry/smart-gateway-operator-599b4778bd-f4452" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.866177 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb7f815e-9afe-436b-96ed-797f0703645b-utilities\") pod \"community-operators-hrqqk\" (UID: \"bb7f815e-9afe-436b-96ed-797f0703645b\") " pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.866202 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4z4l6\" (UniqueName: \"kubernetes.io/projected/bb7f815e-9afe-436b-96ed-797f0703645b-kube-api-access-4z4l6\") pod \"community-operators-hrqqk\" (UID: \"bb7f815e-9afe-436b-96ed-797f0703645b\") " pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.866314 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/16b0d4ba-0de0-4171-be83-e536a3199c16-runner\") pod \"smart-gateway-operator-599b4778bd-f4452\" (UID: \"16b0d4ba-0de0-4171-be83-e536a3199c16\") " pod="service-telemetry/smart-gateway-operator-599b4778bd-f4452" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.967609 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb7f815e-9afe-436b-96ed-797f0703645b-catalog-content\") pod \"community-operators-hrqqk\" (UID: \"bb7f815e-9afe-436b-96ed-797f0703645b\") " pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.967660 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdtsx\" (UniqueName: \"kubernetes.io/projected/16b0d4ba-0de0-4171-be83-e536a3199c16-kube-api-access-bdtsx\") pod \"smart-gateway-operator-599b4778bd-f4452\" (UID: \"16b0d4ba-0de0-4171-be83-e536a3199c16\") " pod="service-telemetry/smart-gateway-operator-599b4778bd-f4452" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.967687 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/bb7f815e-9afe-436b-96ed-797f0703645b-utilities\") pod \"community-operators-hrqqk\" (UID: \"bb7f815e-9afe-436b-96ed-797f0703645b\") " pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.967704 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4z4l6\" (UniqueName: \"kubernetes.io/projected/bb7f815e-9afe-436b-96ed-797f0703645b-kube-api-access-4z4l6\") pod \"community-operators-hrqqk\" (UID: \"bb7f815e-9afe-436b-96ed-797f0703645b\") " pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.967735 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/16b0d4ba-0de0-4171-be83-e536a3199c16-runner\") pod \"smart-gateway-operator-599b4778bd-f4452\" (UID: \"16b0d4ba-0de0-4171-be83-e536a3199c16\") " pod="service-telemetry/smart-gateway-operator-599b4778bd-f4452" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.969325 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb7f815e-9afe-436b-96ed-797f0703645b-catalog-content\") pod \"community-operators-hrqqk\" (UID: \"bb7f815e-9afe-436b-96ed-797f0703645b\") " pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.969452 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb7f815e-9afe-436b-96ed-797f0703645b-utilities\") pod \"community-operators-hrqqk\" (UID: \"bb7f815e-9afe-436b-96ed-797f0703645b\") " pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.969674 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/16b0d4ba-0de0-4171-be83-e536a3199c16-runner\") pod \"smart-gateway-operator-599b4778bd-f4452\" (UID: \"16b0d4ba-0de0-4171-be83-e536a3199c16\") " pod="service-telemetry/smart-gateway-operator-599b4778bd-f4452" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.986774 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4z4l6\" (UniqueName: \"kubernetes.io/projected/bb7f815e-9afe-436b-96ed-797f0703645b-kube-api-access-4z4l6\") pod \"community-operators-hrqqk\" (UID: \"bb7f815e-9afe-436b-96ed-797f0703645b\") " pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:07 crc kubenswrapper[4745]: I1208 00:34:07.989546 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdtsx\" (UniqueName: \"kubernetes.io/projected/16b0d4ba-0de0-4171-be83-e536a3199c16-kube-api-access-bdtsx\") pod \"smart-gateway-operator-599b4778bd-f4452\" (UID: \"16b0d4ba-0de0-4171-be83-e536a3199c16\") " pod="service-telemetry/smart-gateway-operator-599b4778bd-f4452" Dec 08 00:34:08 crc kubenswrapper[4745]: I1208 00:34:08.110595 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-599b4778bd-f4452" Dec 08 00:34:08 crc kubenswrapper[4745]: I1208 00:34:08.117838 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:08 crc kubenswrapper[4745]: I1208 00:34:08.447150 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hrqqk"] Dec 08 00:34:08 crc kubenswrapper[4745]: W1208 00:34:08.461037 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbb7f815e_9afe_436b_96ed_797f0703645b.slice/crio-5c595611102f4608d643b77d3f7ef6e398756c8a684ff161d640e88b86f3884c WatchSource:0}: Error finding container 5c595611102f4608d643b77d3f7ef6e398756c8a684ff161d640e88b86f3884c: Status 404 returned error can't find the container with id 5c595611102f4608d643b77d3f7ef6e398756c8a684ff161d640e88b86f3884c Dec 08 00:34:08 crc kubenswrapper[4745]: I1208 00:34:08.603694 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-599b4778bd-f4452"] Dec 08 00:34:08 crc kubenswrapper[4745]: W1208 00:34:08.607121 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16b0d4ba_0de0_4171_be83_e536a3199c16.slice/crio-73814a4be92d93c8869c1bd69e772ceacd4520aaf8e2c5db52a67710694d3008 WatchSource:0}: Error finding container 73814a4be92d93c8869c1bd69e772ceacd4520aaf8e2c5db52a67710694d3008: Status 404 returned error can't find the container with id 73814a4be92d93c8869c1bd69e772ceacd4520aaf8e2c5db52a67710694d3008 Dec 08 00:34:08 crc kubenswrapper[4745]: I1208 00:34:08.948449 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-599b4778bd-f4452" event={"ID":"16b0d4ba-0de0-4171-be83-e536a3199c16","Type":"ContainerStarted","Data":"73814a4be92d93c8869c1bd69e772ceacd4520aaf8e2c5db52a67710694d3008"} Dec 08 00:34:08 crc kubenswrapper[4745]: I1208 00:34:08.949535 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrqqk" event={"ID":"bb7f815e-9afe-436b-96ed-797f0703645b","Type":"ContainerStarted","Data":"5c595611102f4608d643b77d3f7ef6e398756c8a684ff161d640e88b86f3884c"} Dec 08 00:34:09 crc kubenswrapper[4745]: I1208 00:34:09.965611 4745 generic.go:334] "Generic (PLEG): container finished" podID="bb7f815e-9afe-436b-96ed-797f0703645b" containerID="d199874af8d3b0064e4b6ed80b070dc57c433cd9af1522fe262391a7f9ffbecb" exitCode=0 Dec 08 00:34:09 crc kubenswrapper[4745]: I1208 00:34:09.965655 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrqqk" event={"ID":"bb7f815e-9afe-436b-96ed-797f0703645b","Type":"ContainerDied","Data":"d199874af8d3b0064e4b6ed80b070dc57c433cd9af1522fe262391a7f9ffbecb"} Dec 08 00:34:10 crc kubenswrapper[4745]: I1208 00:34:10.988347 4745 generic.go:334] "Generic (PLEG): container finished" podID="bb7f815e-9afe-436b-96ed-797f0703645b" containerID="5e5f1783fbdbdb3d369addaeb7a25c950789d2414162661ad36c721b71299851" exitCode=0 Dec 08 00:34:10 crc kubenswrapper[4745]: I1208 00:34:10.988389 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrqqk" event={"ID":"bb7f815e-9afe-436b-96ed-797f0703645b","Type":"ContainerDied","Data":"5e5f1783fbdbdb3d369addaeb7a25c950789d2414162661ad36c721b71299851"} Dec 08 00:34:12 crc kubenswrapper[4745]: I1208 00:34:12.005554 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrqqk" 
event={"ID":"bb7f815e-9afe-436b-96ed-797f0703645b","Type":"ContainerStarted","Data":"5a79458843759df5b8980f49d1699e1b215370956b7918fd7e09c83a1baf3b2f"} Dec 08 00:34:12 crc kubenswrapper[4745]: I1208 00:34:12.028388 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hrqqk" podStartSLOduration=3.6357844630000002 podStartE2EDuration="5.028372061s" podCreationTimestamp="2025-12-08 00:34:07 +0000 UTC" firstStartedPulling="2025-12-08 00:34:09.967636804 +0000 UTC m=+1605.396843104" lastFinishedPulling="2025-12-08 00:34:11.360224402 +0000 UTC m=+1606.789430702" observedRunningTime="2025-12-08 00:34:12.026586134 +0000 UTC m=+1607.455792434" watchObservedRunningTime="2025-12-08 00:34:12.028372061 +0000 UTC m=+1607.457578361" Dec 08 00:34:17 crc kubenswrapper[4745]: I1208 00:34:17.882436 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:34:17 crc kubenswrapper[4745]: E1208 00:34:17.883268 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:34:18 crc kubenswrapper[4745]: I1208 00:34:18.119005 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:18 crc kubenswrapper[4745]: I1208 00:34:18.119317 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:18 crc kubenswrapper[4745]: I1208 00:34:18.169389 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:19 crc kubenswrapper[4745]: I1208 00:34:19.116276 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:21 crc kubenswrapper[4745]: I1208 00:34:21.356585 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hrqqk"] Dec 08 00:34:22 crc kubenswrapper[4745]: I1208 00:34:22.075541 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hrqqk" podUID="bb7f815e-9afe-436b-96ed-797f0703645b" containerName="registry-server" containerID="cri-o://5a79458843759df5b8980f49d1699e1b215370956b7918fd7e09c83a1baf3b2f" gracePeriod=2 Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.005772 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.123708 4745 generic.go:334] "Generic (PLEG): container finished" podID="bb7f815e-9afe-436b-96ed-797f0703645b" containerID="5a79458843759df5b8980f49d1699e1b215370956b7918fd7e09c83a1baf3b2f" exitCode=0 Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.123751 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrqqk" event={"ID":"bb7f815e-9afe-436b-96ed-797f0703645b","Type":"ContainerDied","Data":"5a79458843759df5b8980f49d1699e1b215370956b7918fd7e09c83a1baf3b2f"} Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.123782 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hrqqk" event={"ID":"bb7f815e-9afe-436b-96ed-797f0703645b","Type":"ContainerDied","Data":"5c595611102f4608d643b77d3f7ef6e398756c8a684ff161d640e88b86f3884c"} Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.123801 4745 scope.go:117] "RemoveContainer" containerID="5a79458843759df5b8980f49d1699e1b215370956b7918fd7e09c83a1baf3b2f" Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.123821 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hrqqk" Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.207470 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb7f815e-9afe-436b-96ed-797f0703645b-utilities\") pod \"bb7f815e-9afe-436b-96ed-797f0703645b\" (UID: \"bb7f815e-9afe-436b-96ed-797f0703645b\") " Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.207545 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb7f815e-9afe-436b-96ed-797f0703645b-catalog-content\") pod \"bb7f815e-9afe-436b-96ed-797f0703645b\" (UID: \"bb7f815e-9afe-436b-96ed-797f0703645b\") " Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.207598 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4z4l6\" (UniqueName: \"kubernetes.io/projected/bb7f815e-9afe-436b-96ed-797f0703645b-kube-api-access-4z4l6\") pod \"bb7f815e-9afe-436b-96ed-797f0703645b\" (UID: \"bb7f815e-9afe-436b-96ed-797f0703645b\") " Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.209098 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb7f815e-9afe-436b-96ed-797f0703645b-utilities" (OuterVolumeSpecName: "utilities") pod "bb7f815e-9afe-436b-96ed-797f0703645b" (UID: "bb7f815e-9afe-436b-96ed-797f0703645b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.212757 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb7f815e-9afe-436b-96ed-797f0703645b-kube-api-access-4z4l6" (OuterVolumeSpecName: "kube-api-access-4z4l6") pod "bb7f815e-9afe-436b-96ed-797f0703645b" (UID: "bb7f815e-9afe-436b-96ed-797f0703645b"). InnerVolumeSpecName "kube-api-access-4z4l6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.262162 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb7f815e-9afe-436b-96ed-797f0703645b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb7f815e-9afe-436b-96ed-797f0703645b" (UID: "bb7f815e-9afe-436b-96ed-797f0703645b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.309432 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb7f815e-9afe-436b-96ed-797f0703645b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.309471 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4z4l6\" (UniqueName: \"kubernetes.io/projected/bb7f815e-9afe-436b-96ed-797f0703645b-kube-api-access-4z4l6\") on node \"crc\" DevicePath \"\"" Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.309487 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb7f815e-9afe-436b-96ed-797f0703645b-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.472993 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hrqqk"] Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.482270 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hrqqk"] Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.894700 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb7f815e-9afe-436b-96ed-797f0703645b" path="/var/lib/kubelet/pods/bb7f815e-9afe-436b-96ed-797f0703645b/volumes" Dec 08 00:34:24 crc kubenswrapper[4745]: I1208 00:34:24.988086 4745 scope.go:117] "RemoveContainer" containerID="5e5f1783fbdbdb3d369addaeb7a25c950789d2414162661ad36c721b71299851" Dec 08 00:34:31 crc kubenswrapper[4745]: I1208 00:34:31.882783 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:34:31 crc kubenswrapper[4745]: E1208 00:34:31.883255 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:34:33 crc kubenswrapper[4745]: I1208 00:34:33.497522 4745 scope.go:117] "RemoveContainer" containerID="d199874af8d3b0064e4b6ed80b070dc57c433cd9af1522fe262391a7f9ffbecb" Dec 08 00:34:34 crc kubenswrapper[4745]: I1208 00:34:34.313407 4745 scope.go:117] "RemoveContainer" containerID="5a79458843759df5b8980f49d1699e1b215370956b7918fd7e09c83a1baf3b2f" Dec 08 00:34:34 crc kubenswrapper[4745]: E1208 00:34:34.313910 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a79458843759df5b8980f49d1699e1b215370956b7918fd7e09c83a1baf3b2f\": container with ID starting with 5a79458843759df5b8980f49d1699e1b215370956b7918fd7e09c83a1baf3b2f not found: ID does not exist" 
containerID="5a79458843759df5b8980f49d1699e1b215370956b7918fd7e09c83a1baf3b2f" Dec 08 00:34:34 crc kubenswrapper[4745]: I1208 00:34:34.313980 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a79458843759df5b8980f49d1699e1b215370956b7918fd7e09c83a1baf3b2f"} err="failed to get container status \"5a79458843759df5b8980f49d1699e1b215370956b7918fd7e09c83a1baf3b2f\": rpc error: code = NotFound desc = could not find container \"5a79458843759df5b8980f49d1699e1b215370956b7918fd7e09c83a1baf3b2f\": container with ID starting with 5a79458843759df5b8980f49d1699e1b215370956b7918fd7e09c83a1baf3b2f not found: ID does not exist" Dec 08 00:34:34 crc kubenswrapper[4745]: I1208 00:34:34.314000 4745 scope.go:117] "RemoveContainer" containerID="5e5f1783fbdbdb3d369addaeb7a25c950789d2414162661ad36c721b71299851" Dec 08 00:34:34 crc kubenswrapper[4745]: E1208 00:34:34.314549 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e5f1783fbdbdb3d369addaeb7a25c950789d2414162661ad36c721b71299851\": container with ID starting with 5e5f1783fbdbdb3d369addaeb7a25c950789d2414162661ad36c721b71299851 not found: ID does not exist" containerID="5e5f1783fbdbdb3d369addaeb7a25c950789d2414162661ad36c721b71299851" Dec 08 00:34:34 crc kubenswrapper[4745]: I1208 00:34:34.314615 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e5f1783fbdbdb3d369addaeb7a25c950789d2414162661ad36c721b71299851"} err="failed to get container status \"5e5f1783fbdbdb3d369addaeb7a25c950789d2414162661ad36c721b71299851\": rpc error: code = NotFound desc = could not find container \"5e5f1783fbdbdb3d369addaeb7a25c950789d2414162661ad36c721b71299851\": container with ID starting with 5e5f1783fbdbdb3d369addaeb7a25c950789d2414162661ad36c721b71299851 not found: ID does not exist" Dec 08 00:34:34 crc kubenswrapper[4745]: I1208 00:34:34.314657 4745 scope.go:117] "RemoveContainer" containerID="d199874af8d3b0064e4b6ed80b070dc57c433cd9af1522fe262391a7f9ffbecb" Dec 08 00:34:34 crc kubenswrapper[4745]: E1208 00:34:34.315068 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d199874af8d3b0064e4b6ed80b070dc57c433cd9af1522fe262391a7f9ffbecb\": container with ID starting with d199874af8d3b0064e4b6ed80b070dc57c433cd9af1522fe262391a7f9ffbecb not found: ID does not exist" containerID="d199874af8d3b0064e4b6ed80b070dc57c433cd9af1522fe262391a7f9ffbecb" Dec 08 00:34:34 crc kubenswrapper[4745]: I1208 00:34:34.315120 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d199874af8d3b0064e4b6ed80b070dc57c433cd9af1522fe262391a7f9ffbecb"} err="failed to get container status \"d199874af8d3b0064e4b6ed80b070dc57c433cd9af1522fe262391a7f9ffbecb\": rpc error: code = NotFound desc = could not find container \"d199874af8d3b0064e4b6ed80b070dc57c433cd9af1522fe262391a7f9ffbecb\": container with ID starting with d199874af8d3b0064e4b6ed80b070dc57c433cd9af1522fe262391a7f9ffbecb not found: ID does not exist" Dec 08 00:34:34 crc kubenswrapper[4745]: E1208 00:34:34.436845 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/infrawatch/smart-gateway-operator:latest" Dec 08 00:34:34 crc kubenswrapper[4745]: E1208 00:34:34.437127 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/infrawatch/smart-gateway-operator:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:WATCH_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.targetNamespaces'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_NAME,Value:smart-gateway-operator,ValueFrom:nil,},EnvVar{Name:ANSIBLE_GATHERING,Value:explicit,ValueFrom:nil,},EnvVar{Name:ANSIBLE_VERBOSITY_SMARTGATEWAY_SMARTGATEWAY_INFRA_WATCH,Value:4,ValueFrom:nil,},EnvVar{Name:ANSIBLE_DEBUG_LOGS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CORE_SMARTGATEWAY_IMAGE,Value:image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BRIDGE_SMARTGATEWAY_IMAGE,Value:image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-bridge:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OAUTH_PROXY_IMAGE,Value:quay.io/openshift/origin-oauth-proxy:latest,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:smart-gateway-operator.v5.0.1765153888,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:runner,ReadOnly:false,MountPath:/tmp/ansible-operator/runner,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bdtsx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod smart-gateway-operator-599b4778bd-f4452_service-telemetry(16b0d4ba-0de0-4171-be83-e536a3199c16): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 00:34:34 crc kubenswrapper[4745]: E1208 00:34:34.442445 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/smart-gateway-operator-599b4778bd-f4452" podUID="16b0d4ba-0de0-4171-be83-e536a3199c16" Dec 08 00:34:34 crc kubenswrapper[4745]: E1208 00:34:34.620486 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/infrawatch/service-telemetry-operator:latest" Dec 08 00:34:34 crc kubenswrapper[4745]: E1208 00:34:34.620706 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/infrawatch/service-telemetry-operator:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:WATCH_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.targetNamespaces'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_NAME,Value:service-telemetry-operator,ValueFrom:nil,},EnvVar{Name:ANSIBLE_GATHERING,Value:explicit,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_WEBHOOK_SNMP_IMAGE,Value:image-registry.openshift-image-registry.svc:5000/service-telemetry/prometheus-webhook-snmp:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OAUTH_PROXY_IMAGE,Value:quay.io/openshift/origin-oauth-proxy:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_IMAGE,Value:quay.io/prometheus/prometheus:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER_IMAGE,Value:quay.io/prometheus/alertmanager:latest,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:service-telemetry-operator.v1.5.1765153890,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:runner,ReadOnly:false,MountPath:/tmp/ansible-operator/runner,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7pz55,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod service-telemetry-operator-8667fccdb7-8fsfw_service-telemetry(5d411af2-29cc-431a-a376-9031915ed5c1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 00:34:34 crc kubenswrapper[4745]: E1208 00:34:34.622080 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/service-telemetry-operator-8667fccdb7-8fsfw" podUID="5d411af2-29cc-431a-a376-9031915ed5c1" Dec 08 00:34:35 crc kubenswrapper[4745]: E1208 00:34:35.226358 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/smart-gateway-operator:latest\\\"\"" pod="service-telemetry/smart-gateway-operator-599b4778bd-f4452" podUID="16b0d4ba-0de0-4171-be83-e536a3199c16" Dec 08 00:34:35 crc kubenswrapper[4745]: E1208 00:34:35.227642 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: 
\"Back-off pulling image \\\"quay.io/infrawatch/service-telemetry-operator:latest\\\"\"" pod="service-telemetry/service-telemetry-operator-8667fccdb7-8fsfw" podUID="5d411af2-29cc-431a-a376-9031915ed5c1" Dec 08 00:34:43 crc kubenswrapper[4745]: I1208 00:34:43.882573 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:34:43 crc kubenswrapper[4745]: E1208 00:34:43.883433 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:34:46 crc kubenswrapper[4745]: I1208 00:34:46.887259 4745 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 00:34:48 crc kubenswrapper[4745]: I1208 00:34:48.355076 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-8667fccdb7-8fsfw" event={"ID":"5d411af2-29cc-431a-a376-9031915ed5c1","Type":"ContainerStarted","Data":"4277d4180572db21cb7a94fd3a9093d23b0fcedb4d1a4db609ee3e408a1fe62d"} Dec 08 00:34:48 crc kubenswrapper[4745]: I1208 00:34:48.387186 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-8667fccdb7-8fsfw" podStartSLOduration=2.263572803 podStartE2EDuration="44.387142414s" podCreationTimestamp="2025-12-08 00:34:04 +0000 UTC" firstStartedPulling="2025-12-08 00:34:05.477300341 +0000 UTC m=+1600.906506641" lastFinishedPulling="2025-12-08 00:34:47.600869912 +0000 UTC m=+1643.030076252" observedRunningTime="2025-12-08 00:34:48.3850992 +0000 UTC m=+1643.814305510" watchObservedRunningTime="2025-12-08 00:34:48.387142414 +0000 UTC m=+1643.816348744" Dec 08 00:34:51 crc kubenswrapper[4745]: I1208 00:34:51.376091 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-599b4778bd-f4452" event={"ID":"16b0d4ba-0de0-4171-be83-e536a3199c16","Type":"ContainerStarted","Data":"c0071a1a5c384e5233d13f4bf192938bd1f5c680656fbb2fa025fd83e8785429"} Dec 08 00:34:51 crc kubenswrapper[4745]: I1208 00:34:51.397583 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-599b4778bd-f4452" podStartSLOduration=2.691311399 podStartE2EDuration="44.397563581s" podCreationTimestamp="2025-12-08 00:34:07 +0000 UTC" firstStartedPulling="2025-12-08 00:34:08.608813309 +0000 UTC m=+1604.038019609" lastFinishedPulling="2025-12-08 00:34:50.315065451 +0000 UTC m=+1645.744271791" observedRunningTime="2025-12-08 00:34:51.397072048 +0000 UTC m=+1646.826278358" watchObservedRunningTime="2025-12-08 00:34:51.397563581 +0000 UTC m=+1646.826769881" Dec 08 00:34:55 crc kubenswrapper[4745]: I1208 00:34:55.882262 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:34:55 crc kubenswrapper[4745]: E1208 00:34:55.882715 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:35:09 crc kubenswrapper[4745]: I1208 00:35:09.883591 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:35:09 crc kubenswrapper[4745]: E1208 00:35:09.886012 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.480729 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-x4p4w"] Dec 08 00:35:10 crc kubenswrapper[4745]: E1208 00:35:10.481104 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb7f815e-9afe-436b-96ed-797f0703645b" containerName="registry-server" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.481133 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb7f815e-9afe-436b-96ed-797f0703645b" containerName="registry-server" Dec 08 00:35:10 crc kubenswrapper[4745]: E1208 00:35:10.481162 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb7f815e-9afe-436b-96ed-797f0703645b" containerName="extract-utilities" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.481178 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb7f815e-9afe-436b-96ed-797f0703645b" containerName="extract-utilities" Dec 08 00:35:10 crc kubenswrapper[4745]: E1208 00:35:10.481204 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb7f815e-9afe-436b-96ed-797f0703645b" containerName="extract-content" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.481219 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb7f815e-9afe-436b-96ed-797f0703645b" containerName="extract-content" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.481412 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb7f815e-9afe-436b-96ed-797f0703645b" containerName="registry-server" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.482163 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.485482 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-ca" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.486056 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-ca" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.486327 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-interconnect-sasl-config" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.487008 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-users" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.487197 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-credentials" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.487360 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-credentials" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.487555 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-dockercfg-zkcw9" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.530364 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-x4p4w"] Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.560851 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.560909 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-sasl-users\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.560960 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/0b5687a7-1989-4d78-8106-1049126d45ff-sasl-config\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.560995 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.561119 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.561160 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.561200 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rmq6\" (UniqueName: \"kubernetes.io/projected/0b5687a7-1989-4d78-8106-1049126d45ff-kube-api-access-5rmq6\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.662044 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.662416 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.662458 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rmq6\" (UniqueName: \"kubernetes.io/projected/0b5687a7-1989-4d78-8106-1049126d45ff-kube-api-access-5rmq6\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.662502 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.662538 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-sasl-users\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.662566 
4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/0b5687a7-1989-4d78-8106-1049126d45ff-sasl-config\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.662607 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.663938 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/0b5687a7-1989-4d78-8106-1049126d45ff-sasl-config\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.682851 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-sasl-users\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.682903 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.683244 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.683530 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.683585 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.688270 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-5rmq6\" (UniqueName: \"kubernetes.io/projected/0b5687a7-1989-4d78-8106-1049126d45ff-kube-api-access-5rmq6\") pod \"default-interconnect-68864d46cb-x4p4w\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:10 crc kubenswrapper[4745]: I1208 00:35:10.821275 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:35:11 crc kubenswrapper[4745]: I1208 00:35:11.094053 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-x4p4w"] Dec 08 00:35:11 crc kubenswrapper[4745]: I1208 00:35:11.524253 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" event={"ID":"0b5687a7-1989-4d78-8106-1049126d45ff","Type":"ContainerStarted","Data":"b0be894bbc434b753bae42bd7341fb1fd3bfabd4bbe43ac8526b92cffee79d3e"} Dec 08 00:35:15 crc kubenswrapper[4745]: I1208 00:35:15.568250 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" event={"ID":"0b5687a7-1989-4d78-8106-1049126d45ff","Type":"ContainerStarted","Data":"53d9dc8e75ec87028e486ddf7976b84a0416e69bfd36a5d6e657157059579c6f"} Dec 08 00:35:15 crc kubenswrapper[4745]: I1208 00:35:15.592415 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" podStartSLOduration=1.713792918 podStartE2EDuration="5.592392043s" podCreationTimestamp="2025-12-08 00:35:10 +0000 UTC" firstStartedPulling="2025-12-08 00:35:11.109432404 +0000 UTC m=+1666.538638714" lastFinishedPulling="2025-12-08 00:35:14.988031499 +0000 UTC m=+1670.417237839" observedRunningTime="2025-12-08 00:35:15.587892454 +0000 UTC m=+1671.017098764" watchObservedRunningTime="2025-12-08 00:35:15.592392043 +0000 UTC m=+1671.021598363" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.358416 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-default-0"] Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.360696 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.364106 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-web-config" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.365132 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-stf-dockercfg-m4vxx" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.365263 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.365340 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-prometheus-proxy-tls" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.365389 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"serving-certs-ca-bundle" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.365493 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.365580 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-tls-assets-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.365580 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-session-secret" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.379095 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.504010 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-config-out\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.504094 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-addab583-525e-4905-8a3f-793e96036058\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-addab583-525e-4905-8a3f-793e96036058\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.504164 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmrkw\" (UniqueName: \"kubernetes.io/projected/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-kube-api-access-wmrkw\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.504233 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.504296 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.504337 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.504379 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-tls-assets\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.504423 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-web-config\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.504462 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-config\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.504497 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.608268 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.608335 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.608384 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-tls-assets\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 
00:35:20.608431 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-web-config\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.609984 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-config\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.610041 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.610051 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.610164 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-config-out\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.610240 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-addab583-525e-4905-8a3f-793e96036058\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-addab583-525e-4905-8a3f-793e96036058\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.610336 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmrkw\" (UniqueName: \"kubernetes.io/projected/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-kube-api-access-wmrkw\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.610461 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: E1208 00:35:20.610631 4745 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Dec 08 00:35:20 crc kubenswrapper[4745]: E1208 00:35:20.610716 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-secret-default-prometheus-proxy-tls 
podName:16fb1bc8-8e50-4d0b-ae02-040249f1bf88 nodeName:}" failed. No retries permitted until 2025-12-08 00:35:21.11068734 +0000 UTC m=+1676.539893680 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "16fb1bc8-8e50-4d0b-ae02-040249f1bf88") : secret "default-prometheus-proxy-tls" not found Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.612029 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.615352 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-config\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.615480 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.615831 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-tls-assets\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.616531 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-web-config\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.627626 4745 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.627901 4745 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-addab583-525e-4905-8a3f-793e96036058\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-addab583-525e-4905-8a3f-793e96036058\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/bf3959d7e61aba1372b33c3fcff87961bc23720279adcfcf4e493102a0ba8ea2/globalmount\"" pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.628368 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-config-out\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.640315 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmrkw\" (UniqueName: \"kubernetes.io/projected/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-kube-api-access-wmrkw\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:20 crc kubenswrapper[4745]: I1208 00:35:20.681324 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-addab583-525e-4905-8a3f-793e96036058\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-addab583-525e-4905-8a3f-793e96036058\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:21 crc kubenswrapper[4745]: I1208 00:35:21.129970 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:21 crc kubenswrapper[4745]: E1208 00:35:21.130221 4745 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Dec 08 00:35:21 crc kubenswrapper[4745]: E1208 00:35:21.130387 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-secret-default-prometheus-proxy-tls podName:16fb1bc8-8e50-4d0b-ae02-040249f1bf88 nodeName:}" failed. No retries permitted until 2025-12-08 00:35:22.130346474 +0000 UTC m=+1677.559552804 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "16fb1bc8-8e50-4d0b-ae02-040249f1bf88") : secret "default-prometheus-proxy-tls" not found Dec 08 00:35:22 crc kubenswrapper[4745]: I1208 00:35:22.145283 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:22 crc kubenswrapper[4745]: I1208 00:35:22.149437 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/16fb1bc8-8e50-4d0b-ae02-040249f1bf88-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"16fb1bc8-8e50-4d0b-ae02-040249f1bf88\") " pod="service-telemetry/prometheus-default-0" Dec 08 00:35:22 crc kubenswrapper[4745]: I1208 00:35:22.206638 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-default-0" Dec 08 00:35:22 crc kubenswrapper[4745]: W1208 00:35:22.485471 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod16fb1bc8_8e50_4d0b_ae02_040249f1bf88.slice/crio-8fb0374eb59727f20522acc20973221056c4d6907830e74a5c8a9d217b84566a WatchSource:0}: Error finding container 8fb0374eb59727f20522acc20973221056c4d6907830e74a5c8a9d217b84566a: Status 404 returned error can't find the container with id 8fb0374eb59727f20522acc20973221056c4d6907830e74a5c8a9d217b84566a Dec 08 00:35:22 crc kubenswrapper[4745]: I1208 00:35:22.490610 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Dec 08 00:35:22 crc kubenswrapper[4745]: I1208 00:35:22.627422 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"16fb1bc8-8e50-4d0b-ae02-040249f1bf88","Type":"ContainerStarted","Data":"8fb0374eb59727f20522acc20973221056c4d6907830e74a5c8a9d217b84566a"} Dec 08 00:35:24 crc kubenswrapper[4745]: I1208 00:35:24.888602 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:35:24 crc kubenswrapper[4745]: E1208 00:35:24.889096 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:35:27 crc kubenswrapper[4745]: I1208 00:35:27.661051 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"16fb1bc8-8e50-4d0b-ae02-040249f1bf88","Type":"ContainerStarted","Data":"985f7872086e3b575bed38ad6163332716813dcc798e9278a5da88fbd8e1dae1"} Dec 08 00:35:29 crc kubenswrapper[4745]: I1208 00:35:29.939873 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-snmp-webhook-6856cfb745-jwn74"] Dec 08 00:35:29 crc kubenswrapper[4745]: 
I1208 00:35:29.941202 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-snmp-webhook-6856cfb745-jwn74" Dec 08 00:35:29 crc kubenswrapper[4745]: I1208 00:35:29.949850 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-6856cfb745-jwn74"] Dec 08 00:35:30 crc kubenswrapper[4745]: I1208 00:35:30.063228 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m4kx\" (UniqueName: \"kubernetes.io/projected/9a588f99-61be-4bfc-b1b6-c444e06c2ada-kube-api-access-2m4kx\") pod \"default-snmp-webhook-6856cfb745-jwn74\" (UID: \"9a588f99-61be-4bfc-b1b6-c444e06c2ada\") " pod="service-telemetry/default-snmp-webhook-6856cfb745-jwn74" Dec 08 00:35:30 crc kubenswrapper[4745]: I1208 00:35:30.164983 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m4kx\" (UniqueName: \"kubernetes.io/projected/9a588f99-61be-4bfc-b1b6-c444e06c2ada-kube-api-access-2m4kx\") pod \"default-snmp-webhook-6856cfb745-jwn74\" (UID: \"9a588f99-61be-4bfc-b1b6-c444e06c2ada\") " pod="service-telemetry/default-snmp-webhook-6856cfb745-jwn74" Dec 08 00:35:30 crc kubenswrapper[4745]: I1208 00:35:30.196290 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m4kx\" (UniqueName: \"kubernetes.io/projected/9a588f99-61be-4bfc-b1b6-c444e06c2ada-kube-api-access-2m4kx\") pod \"default-snmp-webhook-6856cfb745-jwn74\" (UID: \"9a588f99-61be-4bfc-b1b6-c444e06c2ada\") " pod="service-telemetry/default-snmp-webhook-6856cfb745-jwn74" Dec 08 00:35:30 crc kubenswrapper[4745]: I1208 00:35:30.261226 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-snmp-webhook-6856cfb745-jwn74" Dec 08 00:35:30 crc kubenswrapper[4745]: I1208 00:35:30.545959 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-6856cfb745-jwn74"] Dec 08 00:35:30 crc kubenswrapper[4745]: W1208 00:35:30.558036 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a588f99_61be_4bfc_b1b6_c444e06c2ada.slice/crio-aa021cfec5ade9deaf4acfdc2aafcb437b8c9f4bd9fdb95a722febf1a64d2d03 WatchSource:0}: Error finding container aa021cfec5ade9deaf4acfdc2aafcb437b8c9f4bd9fdb95a722febf1a64d2d03: Status 404 returned error can't find the container with id aa021cfec5ade9deaf4acfdc2aafcb437b8c9f4bd9fdb95a722febf1a64d2d03 Dec 08 00:35:30 crc kubenswrapper[4745]: I1208 00:35:30.690730 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-6856cfb745-jwn74" event={"ID":"9a588f99-61be-4bfc-b1b6-c444e06c2ada","Type":"ContainerStarted","Data":"aa021cfec5ade9deaf4acfdc2aafcb437b8c9f4bd9fdb95a722febf1a64d2d03"} Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.721811 4745 generic.go:334] "Generic (PLEG): container finished" podID="16fb1bc8-8e50-4d0b-ae02-040249f1bf88" containerID="985f7872086e3b575bed38ad6163332716813dcc798e9278a5da88fbd8e1dae1" exitCode=0 Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.722119 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"16fb1bc8-8e50-4d0b-ae02-040249f1bf88","Type":"ContainerDied","Data":"985f7872086e3b575bed38ad6163332716813dcc798e9278a5da88fbd8e1dae1"} Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.802514 4745 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["service-telemetry/alertmanager-default-0"] Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.807189 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.813213 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-cluster-tls-config" Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.813541 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-tls-assets-0" Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.813648 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-stf-dockercfg-ngk6m" Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.813560 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-alertmanager-proxy-tls" Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.813853 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-web-config" Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.813875 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-generated" Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.822561 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.930614 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsx76\" (UniqueName: \"kubernetes.io/projected/58680707-df73-4d46-8148-5410ac829436-kube-api-access-gsx76\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.930664 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-web-config\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.930692 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/58680707-df73-4d46-8148-5410ac829436-tls-assets\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.930973 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.931054 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: 
\"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.931285 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/58680707-df73-4d46-8148-5410ac829436-config-out\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.931365 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5036eba5-284f-48de-a234-f33a3cb1617f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5036eba5-284f-48de-a234-f33a3cb1617f\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.931434 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-config-volume\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:33 crc kubenswrapper[4745]: I1208 00:35:33.931514 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.033311 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-config-volume\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.033359 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.033415 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsx76\" (UniqueName: \"kubernetes.io/projected/58680707-df73-4d46-8148-5410ac829436-kube-api-access-gsx76\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.033432 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-web-config\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.033450 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/58680707-df73-4d46-8148-5410ac829436-tls-assets\") pod 
\"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.033479 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.033503 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.033540 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/58680707-df73-4d46-8148-5410ac829436-config-out\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.033568 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5036eba5-284f-48de-a234-f33a3cb1617f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5036eba5-284f-48de-a234-f33a3cb1617f\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: E1208 00:35:34.033858 4745 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Dec 08 00:35:34 crc kubenswrapper[4745]: E1208 00:35:34.033946 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-secret-default-alertmanager-proxy-tls podName:58680707-df73-4d46-8148-5410ac829436 nodeName:}" failed. No retries permitted until 2025-12-08 00:35:34.533908367 +0000 UTC m=+1689.963114667 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "58680707-df73-4d46-8148-5410ac829436") : secret "default-alertmanager-proxy-tls" not found Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.036874 4745 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.036904 4745 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5036eba5-284f-48de-a234-f33a3cb1617f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5036eba5-284f-48de-a234-f33a3cb1617f\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/358401d45448c20d07682ad11385cc697cc6d4f6cfabf7c2997d21879d26d470/globalmount\"" pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.039589 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.039752 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-config-volume\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.040363 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/58680707-df73-4d46-8148-5410ac829436-config-out\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.048975 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/58680707-df73-4d46-8148-5410ac829436-tls-assets\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.049344 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.052006 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-web-config\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.053560 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsx76\" (UniqueName: \"kubernetes.io/projected/58680707-df73-4d46-8148-5410ac829436-kube-api-access-gsx76\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.070040 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5036eba5-284f-48de-a234-f33a3cb1617f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5036eba5-284f-48de-a234-f33a3cb1617f\") pod 
\"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: I1208 00:35:34.542677 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:34 crc kubenswrapper[4745]: E1208 00:35:34.542822 4745 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Dec 08 00:35:34 crc kubenswrapper[4745]: E1208 00:35:34.542896 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-secret-default-alertmanager-proxy-tls podName:58680707-df73-4d46-8148-5410ac829436 nodeName:}" failed. No retries permitted until 2025-12-08 00:35:35.542873439 +0000 UTC m=+1690.972079759 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "58680707-df73-4d46-8148-5410ac829436") : secret "default-alertmanager-proxy-tls" not found Dec 08 00:35:35 crc kubenswrapper[4745]: I1208 00:35:35.557563 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:35 crc kubenswrapper[4745]: E1208 00:35:35.557750 4745 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Dec 08 00:35:35 crc kubenswrapper[4745]: E1208 00:35:35.557846 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-secret-default-alertmanager-proxy-tls podName:58680707-df73-4d46-8148-5410ac829436 nodeName:}" failed. No retries permitted until 2025-12-08 00:35:37.557823085 +0000 UTC m=+1692.987029405 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "58680707-df73-4d46-8148-5410ac829436") : secret "default-alertmanager-proxy-tls" not found Dec 08 00:35:37 crc kubenswrapper[4745]: I1208 00:35:37.592621 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:37 crc kubenswrapper[4745]: I1208 00:35:37.597273 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/58680707-df73-4d46-8148-5410ac829436-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"58680707-df73-4d46-8148-5410ac829436\") " pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:37 crc kubenswrapper[4745]: I1208 00:35:37.732462 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/alertmanager-default-0" Dec 08 00:35:37 crc kubenswrapper[4745]: I1208 00:35:37.882312 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:35:37 crc kubenswrapper[4745]: E1208 00:35:37.882751 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:35:38 crc kubenswrapper[4745]: I1208 00:35:38.658151 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Dec 08 00:35:38 crc kubenswrapper[4745]: W1208 00:35:38.743885 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58680707_df73_4d46_8148_5410ac829436.slice/crio-03f337a78a03718e9457532785dd936414a961db7619e05d6263eea9ddfea691 WatchSource:0}: Error finding container 03f337a78a03718e9457532785dd936414a961db7619e05d6263eea9ddfea691: Status 404 returned error can't find the container with id 03f337a78a03718e9457532785dd936414a961db7619e05d6263eea9ddfea691 Dec 08 00:35:38 crc kubenswrapper[4745]: I1208 00:35:38.770751 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"58680707-df73-4d46-8148-5410ac829436","Type":"ContainerStarted","Data":"03f337a78a03718e9457532785dd936414a961db7619e05d6263eea9ddfea691"} Dec 08 00:35:39 crc kubenswrapper[4745]: I1208 00:35:39.780816 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-6856cfb745-jwn74" event={"ID":"9a588f99-61be-4bfc-b1b6-c444e06c2ada","Type":"ContainerStarted","Data":"67e9b816b01c52ade39ec0db083548fca7e6861a788701064fdd3a8767e2a6cc"} Dec 08 00:35:39 crc kubenswrapper[4745]: I1208 00:35:39.797266 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="service-telemetry/default-snmp-webhook-6856cfb745-jwn74" podStartSLOduration=2.617265962 podStartE2EDuration="10.797248357s" podCreationTimestamp="2025-12-08 00:35:29 +0000 UTC" firstStartedPulling="2025-12-08 00:35:30.560161892 +0000 UTC m=+1685.989368182" lastFinishedPulling="2025-12-08 00:35:38.740144277 +0000 UTC m=+1694.169350577" observedRunningTime="2025-12-08 00:35:39.7954553 +0000 UTC m=+1695.224661610" watchObservedRunningTime="2025-12-08 00:35:39.797248357 +0000 UTC m=+1695.226454657" Dec 08 00:35:40 crc kubenswrapper[4745]: I1208 00:35:40.791593 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"58680707-df73-4d46-8148-5410ac829436","Type":"ContainerStarted","Data":"2f358e404f1172b3f58f3582afa5952672b21b6a873315babfe0a4887262efef"} Dec 08 00:35:42 crc kubenswrapper[4745]: I1208 00:35:42.804205 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"16fb1bc8-8e50-4d0b-ae02-040249f1bf88","Type":"ContainerStarted","Data":"d18438d32a1f378a932a511e2e5d7ac5cb7ff22440281ffb4ec4fb0afee07cf9"} Dec 08 00:35:45 crc kubenswrapper[4745]: I1208 00:35:45.830051 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"16fb1bc8-8e50-4d0b-ae02-040249f1bf88","Type":"ContainerStarted","Data":"99a9f844bd589f5fa21ad07c41ec042fe2de02e26ba867b0933c0192e47c71d0"} Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.174357 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4"] Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.176735 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.179568 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-dockercfg-jzzqk" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.180427 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-session-secret" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.180657 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-coll-meter-proxy-tls" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.180767 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-meter-sg-core-configmap" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.185626 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4"] Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.318689 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/c2126c0f-717c-4df9-9009-8248c9cd99c4-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.318740 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: 
\"kubernetes.io/configmap/c2126c0f-717c-4df9-9009-8248c9cd99c4-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.318765 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4vl5\" (UniqueName: \"kubernetes.io/projected/c2126c0f-717c-4df9-9009-8248c9cd99c4-kube-api-access-x4vl5\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.318783 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/c2126c0f-717c-4df9-9009-8248c9cd99c4-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.318803 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/c2126c0f-717c-4df9-9009-8248c9cd99c4-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.420491 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/c2126c0f-717c-4df9-9009-8248c9cd99c4-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.420557 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/c2126c0f-717c-4df9-9009-8248c9cd99c4-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.420587 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4vl5\" (UniqueName: \"kubernetes.io/projected/c2126c0f-717c-4df9-9009-8248c9cd99c4-kube-api-access-x4vl5\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.420608 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/c2126c0f-717c-4df9-9009-8248c9cd99c4-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:46 
crc kubenswrapper[4745]: I1208 00:35:46.420635 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/c2126c0f-717c-4df9-9009-8248c9cd99c4-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:46 crc kubenswrapper[4745]: E1208 00:35:46.420850 4745 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Dec 08 00:35:46 crc kubenswrapper[4745]: E1208 00:35:46.420912 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2126c0f-717c-4df9-9009-8248c9cd99c4-default-cloud1-coll-meter-proxy-tls podName:c2126c0f-717c-4df9-9009-8248c9cd99c4 nodeName:}" failed. No retries permitted until 2025-12-08 00:35:46.920890586 +0000 UTC m=+1702.350096886 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/c2126c0f-717c-4df9-9009-8248c9cd99c4-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" (UID: "c2126c0f-717c-4df9-9009-8248c9cd99c4") : secret "default-cloud1-coll-meter-proxy-tls" not found Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.422781 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/c2126c0f-717c-4df9-9009-8248c9cd99c4-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.422786 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/c2126c0f-717c-4df9-9009-8248c9cd99c4-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.427387 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/c2126c0f-717c-4df9-9009-8248c9cd99c4-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.436611 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4vl5\" (UniqueName: \"kubernetes.io/projected/c2126c0f-717c-4df9-9009-8248c9cd99c4-kube-api-access-x4vl5\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.838594 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"58680707-df73-4d46-8148-5410ac829436","Type":"ContainerDied","Data":"2f358e404f1172b3f58f3582afa5952672b21b6a873315babfe0a4887262efef"} Dec 08 00:35:46 crc 
kubenswrapper[4745]: I1208 00:35:46.838514 4745 generic.go:334] "Generic (PLEG): container finished" podID="58680707-df73-4d46-8148-5410ac829436" containerID="2f358e404f1172b3f58f3582afa5952672b21b6a873315babfe0a4887262efef" exitCode=0 Dec 08 00:35:46 crc kubenswrapper[4745]: I1208 00:35:46.928659 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/c2126c0f-717c-4df9-9009-8248c9cd99c4-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:46 crc kubenswrapper[4745]: E1208 00:35:46.929098 4745 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Dec 08 00:35:46 crc kubenswrapper[4745]: E1208 00:35:46.929153 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2126c0f-717c-4df9-9009-8248c9cd99c4-default-cloud1-coll-meter-proxy-tls podName:c2126c0f-717c-4df9-9009-8248c9cd99c4 nodeName:}" failed. No retries permitted until 2025-12-08 00:35:47.92913605 +0000 UTC m=+1703.358342350 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/c2126c0f-717c-4df9-9009-8248c9cd99c4-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" (UID: "c2126c0f-717c-4df9-9009-8248c9cd99c4") : secret "default-cloud1-coll-meter-proxy-tls" not found Dec 08 00:35:47 crc kubenswrapper[4745]: I1208 00:35:47.942014 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/c2126c0f-717c-4df9-9009-8248c9cd99c4-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:47 crc kubenswrapper[4745]: I1208 00:35:47.958685 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/c2126c0f-717c-4df9-9009-8248c9cd99c4-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4\" (UID: \"c2126c0f-717c-4df9-9009-8248c9cd99c4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:47 crc kubenswrapper[4745]: I1208 00:35:47.996642 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.642917 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl"] Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.644312 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.646758 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-meter-sg-core-configmap" Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.648110 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-ceil-meter-proxy-tls" Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.651461 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl"] Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.752626 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/a97f4588-f967-49e3-afcd-7c9a950b00ec-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.752789 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/a97f4588-f967-49e3-afcd-7c9a950b00ec-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.752905 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c79s4\" (UniqueName: \"kubernetes.io/projected/a97f4588-f967-49e3-afcd-7c9a950b00ec-kube-api-access-c79s4\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.752993 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/a97f4588-f967-49e3-afcd-7c9a950b00ec-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.753044 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/a97f4588-f967-49e3-afcd-7c9a950b00ec-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.854706 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/a97f4588-f967-49e3-afcd-7c9a950b00ec-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:48 crc 
kubenswrapper[4745]: I1208 00:35:48.854776 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/a97f4588-f967-49e3-afcd-7c9a950b00ec-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.854826 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c79s4\" (UniqueName: \"kubernetes.io/projected/a97f4588-f967-49e3-afcd-7c9a950b00ec-kube-api-access-c79s4\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.854846 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/a97f4588-f967-49e3-afcd-7c9a950b00ec-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.854876 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/a97f4588-f967-49e3-afcd-7c9a950b00ec-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:48 crc kubenswrapper[4745]: E1208 00:35:48.855214 4745 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Dec 08 00:35:48 crc kubenswrapper[4745]: E1208 00:35:48.855278 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a97f4588-f967-49e3-afcd-7c9a950b00ec-default-cloud1-ceil-meter-proxy-tls podName:a97f4588-f967-49e3-afcd-7c9a950b00ec nodeName:}" failed. No retries permitted until 2025-12-08 00:35:49.355258838 +0000 UTC m=+1704.784465138 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/a97f4588-f967-49e3-afcd-7c9a950b00ec-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" (UID: "a97f4588-f967-49e3-afcd-7c9a950b00ec") : secret "default-cloud1-ceil-meter-proxy-tls" not found Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.855796 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/a97f4588-f967-49e3-afcd-7c9a950b00ec-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.856155 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/a97f4588-f967-49e3-afcd-7c9a950b00ec-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.873590 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/a97f4588-f967-49e3-afcd-7c9a950b00ec-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:48 crc kubenswrapper[4745]: I1208 00:35:48.876375 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c79s4\" (UniqueName: \"kubernetes.io/projected/a97f4588-f967-49e3-afcd-7c9a950b00ec-kube-api-access-c79s4\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:49 crc kubenswrapper[4745]: I1208 00:35:49.361993 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/a97f4588-f967-49e3-afcd-7c9a950b00ec-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:49 crc kubenswrapper[4745]: E1208 00:35:49.362193 4745 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Dec 08 00:35:49 crc kubenswrapper[4745]: E1208 00:35:49.362268 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a97f4588-f967-49e3-afcd-7c9a950b00ec-default-cloud1-ceil-meter-proxy-tls podName:a97f4588-f967-49e3-afcd-7c9a950b00ec nodeName:}" failed. No retries permitted until 2025-12-08 00:35:50.362250169 +0000 UTC m=+1705.791456469 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/a97f4588-f967-49e3-afcd-7c9a950b00ec-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" (UID: "a97f4588-f967-49e3-afcd-7c9a950b00ec") : secret "default-cloud1-ceil-meter-proxy-tls" not found Dec 08 00:35:49 crc kubenswrapper[4745]: I1208 00:35:49.883303 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:35:49 crc kubenswrapper[4745]: E1208 00:35:49.883570 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:35:50 crc kubenswrapper[4745]: I1208 00:35:50.375895 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/a97f4588-f967-49e3-afcd-7c9a950b00ec-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:50 crc kubenswrapper[4745]: I1208 00:35:50.381135 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/a97f4588-f967-49e3-afcd-7c9a950b00ec-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl\" (UID: \"a97f4588-f967-49e3-afcd-7c9a950b00ec\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:50 crc kubenswrapper[4745]: I1208 00:35:50.461239 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" Dec 08 00:35:52 crc kubenswrapper[4745]: I1208 00:35:52.844445 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v"] Dec 08 00:35:52 crc kubenswrapper[4745]: I1208 00:35:52.845738 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:52 crc kubenswrapper[4745]: I1208 00:35:52.849898 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-sens-meter-sg-core-configmap" Dec 08 00:35:52 crc kubenswrapper[4745]: I1208 00:35:52.850112 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-sens-meter-proxy-tls" Dec 08 00:35:52 crc kubenswrapper[4745]: I1208 00:35:52.861057 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v"] Dec 08 00:35:52 crc kubenswrapper[4745]: I1208 00:35:52.922300 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f0030446-12f9-44fe-a42f-b8645bc0a9e2-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:52 crc kubenswrapper[4745]: I1208 00:35:52.922348 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/f0030446-12f9-44fe-a42f-b8645bc0a9e2-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:52 crc kubenswrapper[4745]: I1208 00:35:52.922426 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/f0030446-12f9-44fe-a42f-b8645bc0a9e2-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:52 crc kubenswrapper[4745]: I1208 00:35:52.922450 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/f0030446-12f9-44fe-a42f-b8645bc0a9e2-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:52 crc kubenswrapper[4745]: I1208 00:35:52.922480 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcvtd\" (UniqueName: \"kubernetes.io/projected/f0030446-12f9-44fe-a42f-b8645bc0a9e2-kube-api-access-wcvtd\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:52 crc kubenswrapper[4745]: I1208 00:35:52.960442 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4"] Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.023885 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/f0030446-12f9-44fe-a42f-b8645bc0a9e2-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.024183 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/f0030446-12f9-44fe-a42f-b8645bc0a9e2-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.024239 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/f0030446-12f9-44fe-a42f-b8645bc0a9e2-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.024257 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/f0030446-12f9-44fe-a42f-b8645bc0a9e2-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.024283 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcvtd\" (UniqueName: \"kubernetes.io/projected/f0030446-12f9-44fe-a42f-b8645bc0a9e2-kube-api-access-wcvtd\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:53 crc kubenswrapper[4745]: E1208 00:35:53.024726 4745 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found Dec 08 00:35:53 crc kubenswrapper[4745]: E1208 00:35:53.024807 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f0030446-12f9-44fe-a42f-b8645bc0a9e2-default-cloud1-sens-meter-proxy-tls podName:f0030446-12f9-44fe-a42f-b8645bc0a9e2 nodeName:}" failed. No retries permitted until 2025-12-08 00:35:53.524787593 +0000 UTC m=+1708.953993893 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/f0030446-12f9-44fe-a42f-b8645bc0a9e2-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" (UID: "f0030446-12f9-44fe-a42f-b8645bc0a9e2") : secret "default-cloud1-sens-meter-proxy-tls" not found Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.025359 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/f0030446-12f9-44fe-a42f-b8645bc0a9e2-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.026749 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/f0030446-12f9-44fe-a42f-b8645bc0a9e2-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.031891 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/f0030446-12f9-44fe-a42f-b8645bc0a9e2-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.049790 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcvtd\" (UniqueName: \"kubernetes.io/projected/f0030446-12f9-44fe-a42f-b8645bc0a9e2-kube-api-access-wcvtd\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.125089 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl"] Dec 08 00:35:53 crc kubenswrapper[4745]: W1208 00:35:53.129111 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda97f4588_f967_49e3_afcd_7c9a950b00ec.slice/crio-eff26d80f68731ffc57f6b54774410a6e16c514d0121a43eb8e53640220401f9 WatchSource:0}: Error finding container eff26d80f68731ffc57f6b54774410a6e16c514d0121a43eb8e53640220401f9: Status 404 returned error can't find the container with id eff26d80f68731ffc57f6b54774410a6e16c514d0121a43eb8e53640220401f9 Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.530306 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f0030446-12f9-44fe-a42f-b8645bc0a9e2-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:53 crc kubenswrapper[4745]: E1208 00:35:53.530456 4745 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" 
not found Dec 08 00:35:53 crc kubenswrapper[4745]: E1208 00:35:53.530520 4745 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f0030446-12f9-44fe-a42f-b8645bc0a9e2-default-cloud1-sens-meter-proxy-tls podName:f0030446-12f9-44fe-a42f-b8645bc0a9e2 nodeName:}" failed. No retries permitted until 2025-12-08 00:35:54.53050246 +0000 UTC m=+1709.959708760 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/f0030446-12f9-44fe-a42f-b8645bc0a9e2-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" (UID: "f0030446-12f9-44fe-a42f-b8645bc0a9e2") : secret "default-cloud1-sens-meter-proxy-tls" not found Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.888059 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"16fb1bc8-8e50-4d0b-ae02-040249f1bf88","Type":"ContainerStarted","Data":"f3b1e5d5cb4c19fd3b312ef4062fc24da38938048e83f349ec43db4db4142450"} Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.890659 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"58680707-df73-4d46-8148-5410ac829436","Type":"ContainerStarted","Data":"fab5edf8651cde154dd50c64833019bd9ceac59ad11f3d4cd2bab4639e68201f"} Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.902543 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" event={"ID":"a97f4588-f967-49e3-afcd-7c9a950b00ec","Type":"ContainerStarted","Data":"eff26d80f68731ffc57f6b54774410a6e16c514d0121a43eb8e53640220401f9"} Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.906871 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" event={"ID":"c2126c0f-717c-4df9-9009-8248c9cd99c4","Type":"ContainerStarted","Data":"c27b28bf9129b286e30c7495543abaa6e68102131bfc853bff900495b96c85ad"} Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.907263 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" event={"ID":"c2126c0f-717c-4df9-9009-8248c9cd99c4","Type":"ContainerStarted","Data":"e96d1791fae65d6384172ac80f822b4aa4bf7babaed1c123ea707f624d88e59c"} Dec 08 00:35:53 crc kubenswrapper[4745]: I1208 00:35:53.922751 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-default-0" podStartSLOduration=4.784149988 podStartE2EDuration="34.922729917s" podCreationTimestamp="2025-12-08 00:35:19 +0000 UTC" firstStartedPulling="2025-12-08 00:35:22.487376652 +0000 UTC m=+1677.916582952" lastFinishedPulling="2025-12-08 00:35:52.625956581 +0000 UTC m=+1708.055162881" observedRunningTime="2025-12-08 00:35:53.915563877 +0000 UTC m=+1709.344770187" watchObservedRunningTime="2025-12-08 00:35:53.922729917 +0000 UTC m=+1709.351936227" Dec 08 00:35:54 crc kubenswrapper[4745]: I1208 00:35:54.543890 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f0030446-12f9-44fe-a42f-b8645bc0a9e2-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " 
pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:54 crc kubenswrapper[4745]: I1208 00:35:54.550960 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f0030446-12f9-44fe-a42f-b8645bc0a9e2-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v\" (UID: \"f0030446-12f9-44fe-a42f-b8645bc0a9e2\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:54 crc kubenswrapper[4745]: I1208 00:35:54.722983 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" Dec 08 00:35:54 crc kubenswrapper[4745]: I1208 00:35:54.929996 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" event={"ID":"a97f4588-f967-49e3-afcd-7c9a950b00ec","Type":"ContainerStarted","Data":"7169b1f9b30ec1a397ac518a63700cf20573f8594ce6c8f5050d23b86363b7fe"} Dec 08 00:35:54 crc kubenswrapper[4745]: I1208 00:35:54.935294 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"58680707-df73-4d46-8148-5410ac829436","Type":"ContainerStarted","Data":"54c60237bb8bb72a910dc930cdaa964a0615de703ba60204ead0db3ef053c476"} Dec 08 00:35:55 crc kubenswrapper[4745]: I1208 00:35:55.152881 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v"] Dec 08 00:35:55 crc kubenswrapper[4745]: W1208 00:35:55.170989 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0030446_12f9_44fe_a42f_b8645bc0a9e2.slice/crio-042dd9948fd5bbff4c0f598212477056266bbf084f8d2893e5c8da1a57f3b70d WatchSource:0}: Error finding container 042dd9948fd5bbff4c0f598212477056266bbf084f8d2893e5c8da1a57f3b70d: Status 404 returned error can't find the container with id 042dd9948fd5bbff4c0f598212477056266bbf084f8d2893e5c8da1a57f3b70d Dec 08 00:35:55 crc kubenswrapper[4745]: I1208 00:35:55.949275 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"58680707-df73-4d46-8148-5410ac829436","Type":"ContainerStarted","Data":"0459bb1611840f0ce97d277a85c01a1d129c5d69e0f0e151fc13a5bee13e9e27"} Dec 08 00:35:55 crc kubenswrapper[4745]: I1208 00:35:55.950867 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" event={"ID":"f0030446-12f9-44fe-a42f-b8645bc0a9e2","Type":"ContainerStarted","Data":"2a87012b697dc747efa073c364b544bde0f7756f3b8db736cdbdbdb1332b5970"} Dec 08 00:35:55 crc kubenswrapper[4745]: I1208 00:35:55.951183 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" event={"ID":"f0030446-12f9-44fe-a42f-b8645bc0a9e2","Type":"ContainerStarted","Data":"042dd9948fd5bbff4c0f598212477056266bbf084f8d2893e5c8da1a57f3b70d"} Dec 08 00:35:55 crc kubenswrapper[4745]: I1208 00:35:55.980341 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/alertmanager-default-0" podStartSLOduration=15.487801525 podStartE2EDuration="23.98032523s" podCreationTimestamp="2025-12-08 00:35:32 +0000 UTC" firstStartedPulling="2025-12-08 00:35:46.840409795 
+0000 UTC m=+1702.269616095" lastFinishedPulling="2025-12-08 00:35:55.3329335 +0000 UTC m=+1710.762139800" observedRunningTime="2025-12-08 00:35:55.977353572 +0000 UTC m=+1711.406559872" watchObservedRunningTime="2025-12-08 00:35:55.98032523 +0000 UTC m=+1711.409531530" Dec 08 00:35:57 crc kubenswrapper[4745]: I1208 00:35:57.207639 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/prometheus-default-0" Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.129451 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6"] Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.130627 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.136446 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-cert" Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.136587 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-event-sg-core-configmap" Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.143383 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6"] Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.230791 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/28006f01-5d82-4b3c-8a5c-f77fa8f24081-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6\" (UID: \"28006f01-5d82-4b3c-8a5c-f77fa8f24081\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.230950 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw8v6\" (UniqueName: \"kubernetes.io/projected/28006f01-5d82-4b3c-8a5c-f77fa8f24081-kube-api-access-cw8v6\") pod \"default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6\" (UID: \"28006f01-5d82-4b3c-8a5c-f77fa8f24081\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.230997 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/28006f01-5d82-4b3c-8a5c-f77fa8f24081-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6\" (UID: \"28006f01-5d82-4b3c-8a5c-f77fa8f24081\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.231028 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/28006f01-5d82-4b3c-8a5c-f77fa8f24081-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6\" (UID: \"28006f01-5d82-4b3c-8a5c-f77fa8f24081\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.332210 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: 
\"kubernetes.io/empty-dir/28006f01-5d82-4b3c-8a5c-f77fa8f24081-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6\" (UID: \"28006f01-5d82-4b3c-8a5c-f77fa8f24081\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.332261 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/28006f01-5d82-4b3c-8a5c-f77fa8f24081-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6\" (UID: \"28006f01-5d82-4b3c-8a5c-f77fa8f24081\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.332307 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/28006f01-5d82-4b3c-8a5c-f77fa8f24081-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6\" (UID: \"28006f01-5d82-4b3c-8a5c-f77fa8f24081\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.332365 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw8v6\" (UniqueName: \"kubernetes.io/projected/28006f01-5d82-4b3c-8a5c-f77fa8f24081-kube-api-access-cw8v6\") pod \"default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6\" (UID: \"28006f01-5d82-4b3c-8a5c-f77fa8f24081\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.603828 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/28006f01-5d82-4b3c-8a5c-f77fa8f24081-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6\" (UID: \"28006f01-5d82-4b3c-8a5c-f77fa8f24081\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.604715 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/28006f01-5d82-4b3c-8a5c-f77fa8f24081-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6\" (UID: \"28006f01-5d82-4b3c-8a5c-f77fa8f24081\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.604876 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/28006f01-5d82-4b3c-8a5c-f77fa8f24081-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6\" (UID: \"28006f01-5d82-4b3c-8a5c-f77fa8f24081\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.614374 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cw8v6\" (UniqueName: \"kubernetes.io/projected/28006f01-5d82-4b3c-8a5c-f77fa8f24081-kube-api-access-cw8v6\") pod \"default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6\" (UID: \"28006f01-5d82-4b3c-8a5c-f77fa8f24081\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" Dec 08 00:36:00 crc kubenswrapper[4745]: I1208 00:36:00.745433 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.441528 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f"] Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.444127 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.449199 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-event-sg-core-configmap" Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.464960 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f"] Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.552853 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/ae8ae814-8c22-4d63-9907-a4eff4a5f600-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f\" (UID: \"ae8ae814-8c22-4d63-9907-a4eff4a5f600\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.552918 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2vsj\" (UniqueName: \"kubernetes.io/projected/ae8ae814-8c22-4d63-9907-a4eff4a5f600-kube-api-access-c2vsj\") pod \"default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f\" (UID: \"ae8ae814-8c22-4d63-9907-a4eff4a5f600\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.552967 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/ae8ae814-8c22-4d63-9907-a4eff4a5f600-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f\" (UID: \"ae8ae814-8c22-4d63-9907-a4eff4a5f600\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.552988 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/ae8ae814-8c22-4d63-9907-a4eff4a5f600-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f\" (UID: \"ae8ae814-8c22-4d63-9907-a4eff4a5f600\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.653935 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/ae8ae814-8c22-4d63-9907-a4eff4a5f600-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f\" (UID: \"ae8ae814-8c22-4d63-9907-a4eff4a5f600\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.653993 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/ae8ae814-8c22-4d63-9907-a4eff4a5f600-socket-dir\") pod 
\"default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f\" (UID: \"ae8ae814-8c22-4d63-9907-a4eff4a5f600\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.654097 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/ae8ae814-8c22-4d63-9907-a4eff4a5f600-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f\" (UID: \"ae8ae814-8c22-4d63-9907-a4eff4a5f600\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.654132 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2vsj\" (UniqueName: \"kubernetes.io/projected/ae8ae814-8c22-4d63-9907-a4eff4a5f600-kube-api-access-c2vsj\") pod \"default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f\" (UID: \"ae8ae814-8c22-4d63-9907-a4eff4a5f600\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.655229 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/ae8ae814-8c22-4d63-9907-a4eff4a5f600-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f\" (UID: \"ae8ae814-8c22-4d63-9907-a4eff4a5f600\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.655894 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/ae8ae814-8c22-4d63-9907-a4eff4a5f600-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f\" (UID: \"ae8ae814-8c22-4d63-9907-a4eff4a5f600\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.674678 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/ae8ae814-8c22-4d63-9907-a4eff4a5f600-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f\" (UID: \"ae8ae814-8c22-4d63-9907-a4eff4a5f600\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.674814 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6"] Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.677239 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2vsj\" (UniqueName: \"kubernetes.io/projected/ae8ae814-8c22-4d63-9907-a4eff4a5f600-kube-api-access-c2vsj\") pod \"default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f\" (UID: \"ae8ae814-8c22-4d63-9907-a4eff4a5f600\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.780555 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" Dec 08 00:36:01 crc kubenswrapper[4745]: I1208 00:36:01.883227 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:36:01 crc kubenswrapper[4745]: E1208 00:36:01.883491 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:36:02 crc kubenswrapper[4745]: I1208 00:36:02.050644 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" event={"ID":"f0030446-12f9-44fe-a42f-b8645bc0a9e2","Type":"ContainerStarted","Data":"5ced037d64c29fbb7181325f346861b098d7d2cc8331c9c3ccbaa1224f983681"} Dec 08 00:36:02 crc kubenswrapper[4745]: I1208 00:36:02.054479 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" event={"ID":"a97f4588-f967-49e3-afcd-7c9a950b00ec","Type":"ContainerStarted","Data":"412f3e67a029261dcc7f9cc94e413b1f49a29610da29af689e52eaaeed8b999a"} Dec 08 00:36:02 crc kubenswrapper[4745]: I1208 00:36:02.057676 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" event={"ID":"c2126c0f-717c-4df9-9009-8248c9cd99c4","Type":"ContainerStarted","Data":"1b94136cb49005c78ddbea4a41b41cc6148244a35d21ef150730acc5ceebcffc"} Dec 08 00:36:02 crc kubenswrapper[4745]: I1208 00:36:02.060259 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" event={"ID":"28006f01-5d82-4b3c-8a5c-f77fa8f24081","Type":"ContainerStarted","Data":"917e6371e6f7c132a7450f5695591ddc0af42976d8ab53933b032c64152e52d2"} Dec 08 00:36:02 crc kubenswrapper[4745]: I1208 00:36:02.060298 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" event={"ID":"28006f01-5d82-4b3c-8a5c-f77fa8f24081","Type":"ContainerStarted","Data":"009ad8e9875bfb433053bfd54761fde624ab84f071acf5938dc8ac64344d6855"} Dec 08 00:36:02 crc kubenswrapper[4745]: I1208 00:36:02.270714 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f"] Dec 08 00:36:02 crc kubenswrapper[4745]: W1208 00:36:02.272690 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae8ae814_8c22_4d63_9907_a4eff4a5f600.slice/crio-f623e0b4f41bf1608598aa1c6cf4e63ef5abdd049ee61d20096e0d1144299e60 WatchSource:0}: Error finding container f623e0b4f41bf1608598aa1c6cf4e63ef5abdd049ee61d20096e0d1144299e60: Status 404 returned error can't find the container with id f623e0b4f41bf1608598aa1c6cf4e63ef5abdd049ee61d20096e0d1144299e60 Dec 08 00:36:03 crc kubenswrapper[4745]: I1208 00:36:03.072165 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" 
event={"ID":"ae8ae814-8c22-4d63-9907-a4eff4a5f600","Type":"ContainerStarted","Data":"19d83da768704d1f7cedf27ad7c9c65a3a50d78ed721332d63034ea8b0d95444"} Dec 08 00:36:03 crc kubenswrapper[4745]: I1208 00:36:03.072208 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" event={"ID":"ae8ae814-8c22-4d63-9907-a4eff4a5f600","Type":"ContainerStarted","Data":"f623e0b4f41bf1608598aa1c6cf4e63ef5abdd049ee61d20096e0d1144299e60"} Dec 08 00:36:07 crc kubenswrapper[4745]: I1208 00:36:07.207588 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/prometheus-default-0" Dec 08 00:36:07 crc kubenswrapper[4745]: I1208 00:36:07.275279 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/prometheus-default-0" Dec 08 00:36:08 crc kubenswrapper[4745]: I1208 00:36:08.181239 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/prometheus-default-0" Dec 08 00:36:16 crc kubenswrapper[4745]: I1208 00:36:16.751809 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-x4p4w"] Dec 08 00:36:16 crc kubenswrapper[4745]: I1208 00:36:16.752394 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" podUID="0b5687a7-1989-4d78-8106-1049126d45ff" containerName="default-interconnect" containerID="cri-o://53d9dc8e75ec87028e486ddf7976b84a0416e69bfd36a5d6e657157059579c6f" gracePeriod=30 Dec 08 00:36:16 crc kubenswrapper[4745]: E1208 00:36:16.802082 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest" Dec 08 00:36:16 crc kubenswrapper[4745]: E1208 00:36:16.802585 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:sg-core,Image:image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest,Command:[],Args:[-config 
/etc/sg-core/sg-core.conf.yaml],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:prom-https,HostPort:0,ContainerPort:8083,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:MY_POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:socket-dir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:sg-core-config,ReadOnly:true,MountPath:/etc/sg-core/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x4vl5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4_service-telemetry(c2126c0f-717c-4df9-9009-8248c9cd99c4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 00:36:16 crc kubenswrapper[4745]: E1208 00:36:16.803783 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" podUID="c2126c0f-717c-4df9-9009-8248c9cd99c4" Dec 08 00:36:16 crc kubenswrapper[4745]: E1208 00:36:16.809398 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest" Dec 08 00:36:16 crc kubenswrapper[4745]: E1208 00:36:16.809610 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:sg-core,Image:image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest,Command:[],Args:[-config 
/etc/sg-core/sg-core.conf.yaml],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:prom-https,HostPort:0,ContainerPort:8083,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:MY_POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:socket-dir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:sg-core-config,ReadOnly:true,MountPath:/etc/sg-core/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c79s4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl_service-telemetry(a97f4588-f967-49e3-afcd-7c9a950b00ec): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 00:36:16 crc kubenswrapper[4745]: E1208 00:36:16.811524 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" podUID="a97f4588-f967-49e3-afcd-7c9a950b00ec" Dec 08 00:36:16 crc kubenswrapper[4745]: E1208 00:36:16.822122 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest" Dec 08 00:36:16 crc kubenswrapper[4745]: E1208 00:36:16.822281 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:sg-core,Image:image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest,Command:[],Args:[-config 
/etc/sg-core/sg-core.conf.yaml],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:MY_POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:socket-dir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:sg-core-config,ReadOnly:true,MountPath:/etc/sg-core/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-certs,ReadOnly:false,MountPath:/config/certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c2vsj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f_service-telemetry(ae8ae814-8c22-4d63-9907-a4eff4a5f600): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 00:36:16 crc kubenswrapper[4745]: E1208 00:36:16.823985 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest" Dec 08 00:36:16 crc kubenswrapper[4745]: E1208 00:36:16.824012 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" podUID="ae8ae814-8c22-4d63-9907-a4eff4a5f600" Dec 08 00:36:16 crc kubenswrapper[4745]: E1208 00:36:16.824101 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:sg-core,Image:image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest,Command:[],Args:[-config 
/etc/sg-core/sg-core.conf.yaml],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:prom-https,HostPort:0,ContainerPort:8083,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:MY_POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:socket-dir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:sg-core-config,ReadOnly:true,MountPath:/etc/sg-core/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wcvtd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v_service-telemetry(f0030446-12f9-44fe-a42f-b8645bc0a9e2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 00:36:16 crc kubenswrapper[4745]: E1208 00:36:16.825846 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" podUID="f0030446-12f9-44fe-a42f-b8645bc0a9e2" Dec 08 00:36:16 crc kubenswrapper[4745]: E1208 00:36:16.828190 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest" Dec 08 00:36:16 crc kubenswrapper[4745]: E1208 00:36:16.828318 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:sg-core,Image:image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest,Command:[],Args:[-config 
/etc/sg-core/sg-core.conf.yaml],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:MY_POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:socket-dir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:sg-core-config,ReadOnly:true,MountPath:/etc/sg-core/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-certs,ReadOnly:false,MountPath:/config/certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cw8v6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6_service-telemetry(28006f01-5d82-4b3c-8a5c-f77fa8f24081): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 00:36:16 crc kubenswrapper[4745]: E1208 00:36:16.829511 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" podUID="28006f01-5d82-4b3c-8a5c-f77fa8f24081" Dec 08 00:36:16 crc kubenswrapper[4745]: I1208 00:36:16.884619 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:36:16 crc kubenswrapper[4745]: E1208 00:36:16.884850 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.162897 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.176354 4745 generic.go:334] "Generic (PLEG): container finished" podID="0b5687a7-1989-4d78-8106-1049126d45ff" containerID="53d9dc8e75ec87028e486ddf7976b84a0416e69bfd36a5d6e657157059579c6f" exitCode=0 Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.176423 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" event={"ID":"0b5687a7-1989-4d78-8106-1049126d45ff","Type":"ContainerDied","Data":"53d9dc8e75ec87028e486ddf7976b84a0416e69bfd36a5d6e657157059579c6f"} Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.176450 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" event={"ID":"0b5687a7-1989-4d78-8106-1049126d45ff","Type":"ContainerDied","Data":"b0be894bbc434b753bae42bd7341fb1fd3bfabd4bbe43ac8526b92cffee79d3e"} Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.176470 4745 scope.go:117] "RemoveContainer" containerID="53d9dc8e75ec87028e486ddf7976b84a0416e69bfd36a5d6e657157059579c6f" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.176580 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-x4p4w" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.179212 4745 generic.go:334] "Generic (PLEG): container finished" podID="28006f01-5d82-4b3c-8a5c-f77fa8f24081" containerID="917e6371e6f7c132a7450f5695591ddc0af42976d8ab53933b032c64152e52d2" exitCode=0 Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.180353 4745 scope.go:117] "RemoveContainer" containerID="917e6371e6f7c132a7450f5695591ddc0af42976d8ab53933b032c64152e52d2" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.180549 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" event={"ID":"28006f01-5d82-4b3c-8a5c-f77fa8f24081","Type":"ContainerDied","Data":"917e6371e6f7c132a7450f5695591ddc0af42976d8ab53933b032c64152e52d2"} Dec 08 00:36:17 crc kubenswrapper[4745]: E1208 00:36:17.183202 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" podUID="f0030446-12f9-44fe-a42f-b8645bc0a9e2" Dec 08 00:36:17 crc kubenswrapper[4745]: E1208 00:36:17.183491 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" podUID="ae8ae814-8c22-4d63-9907-a4eff4a5f600" Dec 08 00:36:17 crc kubenswrapper[4745]: E1208 00:36:17.183601 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" podUID="a97f4588-f967-49e3-afcd-7c9a950b00ec" Dec 08 00:36:17 crc kubenswrapper[4745]: E1208 00:36:17.185122 4745 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" podUID="c2126c0f-717c-4df9-9009-8248c9cd99c4" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.212845 4745 scope.go:117] "RemoveContainer" containerID="53d9dc8e75ec87028e486ddf7976b84a0416e69bfd36a5d6e657157059579c6f" Dec 08 00:36:17 crc kubenswrapper[4745]: E1208 00:36:17.217589 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53d9dc8e75ec87028e486ddf7976b84a0416e69bfd36a5d6e657157059579c6f\": container with ID starting with 53d9dc8e75ec87028e486ddf7976b84a0416e69bfd36a5d6e657157059579c6f not found: ID does not exist" containerID="53d9dc8e75ec87028e486ddf7976b84a0416e69bfd36a5d6e657157059579c6f" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.217657 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53d9dc8e75ec87028e486ddf7976b84a0416e69bfd36a5d6e657157059579c6f"} err="failed to get container status \"53d9dc8e75ec87028e486ddf7976b84a0416e69bfd36a5d6e657157059579c6f\": rpc error: code = NotFound desc = could not find container \"53d9dc8e75ec87028e486ddf7976b84a0416e69bfd36a5d6e657157059579c6f\": container with ID starting with 53d9dc8e75ec87028e486ddf7976b84a0416e69bfd36a5d6e657157059579c6f not found: ID does not exist" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.360230 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-openstack-credentials\") pod \"0b5687a7-1989-4d78-8106-1049126d45ff\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.360680 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-openstack-ca\") pod \"0b5687a7-1989-4d78-8106-1049126d45ff\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.361238 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b5687a7-1989-4d78-8106-1049126d45ff-sasl-config" (OuterVolumeSpecName: "sasl-config") pod "0b5687a7-1989-4d78-8106-1049126d45ff" (UID: "0b5687a7-1989-4d78-8106-1049126d45ff"). InnerVolumeSpecName "sasl-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.361401 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/0b5687a7-1989-4d78-8106-1049126d45ff-sasl-config\") pod \"0b5687a7-1989-4d78-8106-1049126d45ff\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.361488 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-inter-router-ca\") pod \"0b5687a7-1989-4d78-8106-1049126d45ff\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.361558 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rmq6\" (UniqueName: \"kubernetes.io/projected/0b5687a7-1989-4d78-8106-1049126d45ff-kube-api-access-5rmq6\") pod \"0b5687a7-1989-4d78-8106-1049126d45ff\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.361605 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-sasl-users\") pod \"0b5687a7-1989-4d78-8106-1049126d45ff\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.361711 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-inter-router-credentials\") pod \"0b5687a7-1989-4d78-8106-1049126d45ff\" (UID: \"0b5687a7-1989-4d78-8106-1049126d45ff\") " Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.362438 4745 reconciler_common.go:293] "Volume detached for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/0b5687a7-1989-4d78-8106-1049126d45ff-sasl-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.365203 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-inter-router-ca" (OuterVolumeSpecName: "default-interconnect-inter-router-ca") pod "0b5687a7-1989-4d78-8106-1049126d45ff" (UID: "0b5687a7-1989-4d78-8106-1049126d45ff"). InnerVolumeSpecName "default-interconnect-inter-router-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.366053 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-openstack-credentials" (OuterVolumeSpecName: "default-interconnect-openstack-credentials") pod "0b5687a7-1989-4d78-8106-1049126d45ff" (UID: "0b5687a7-1989-4d78-8106-1049126d45ff"). InnerVolumeSpecName "default-interconnect-openstack-credentials". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.366320 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-openstack-ca" (OuterVolumeSpecName: "default-interconnect-openstack-ca") pod "0b5687a7-1989-4d78-8106-1049126d45ff" (UID: "0b5687a7-1989-4d78-8106-1049126d45ff"). InnerVolumeSpecName "default-interconnect-openstack-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.366544 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-sasl-users" (OuterVolumeSpecName: "sasl-users") pod "0b5687a7-1989-4d78-8106-1049126d45ff" (UID: "0b5687a7-1989-4d78-8106-1049126d45ff"). InnerVolumeSpecName "sasl-users". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.366541 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b5687a7-1989-4d78-8106-1049126d45ff-kube-api-access-5rmq6" (OuterVolumeSpecName: "kube-api-access-5rmq6") pod "0b5687a7-1989-4d78-8106-1049126d45ff" (UID: "0b5687a7-1989-4d78-8106-1049126d45ff"). InnerVolumeSpecName "kube-api-access-5rmq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.367292 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-inter-router-credentials" (OuterVolumeSpecName: "default-interconnect-inter-router-credentials") pod "0b5687a7-1989-4d78-8106-1049126d45ff" (UID: "0b5687a7-1989-4d78-8106-1049126d45ff"). InnerVolumeSpecName "default-interconnect-inter-router-credentials". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.463028 4745 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-inter-router-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.463064 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rmq6\" (UniqueName: \"kubernetes.io/projected/0b5687a7-1989-4d78-8106-1049126d45ff-kube-api-access-5rmq6\") on node \"crc\" DevicePath \"\"" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.463080 4745 reconciler_common.go:293] "Volume detached for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-sasl-users\") on node \"crc\" DevicePath \"\"" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.463096 4745 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-inter-router-credentials\") on node \"crc\" DevicePath \"\"" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.463109 4745 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-openstack-credentials\") on node \"crc\" DevicePath \"\"" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.463123 4745 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/0b5687a7-1989-4d78-8106-1049126d45ff-default-interconnect-openstack-ca\") on node \"crc\" DevicePath \"\"" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.514019 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-x4p4w"] Dec 08 00:36:17 crc kubenswrapper[4745]: E1208 00:36:17.515615 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" podUID="28006f01-5d82-4b3c-8a5c-f77fa8f24081" Dec 08 00:36:17 crc kubenswrapper[4745]: I1208 00:36:17.523338 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-x4p4w"] Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.190796 4745 generic.go:334] "Generic (PLEG): container finished" podID="f0030446-12f9-44fe-a42f-b8645bc0a9e2" containerID="5ced037d64c29fbb7181325f346861b098d7d2cc8331c9c3ccbaa1224f983681" exitCode=0 Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.190855 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" event={"ID":"f0030446-12f9-44fe-a42f-b8645bc0a9e2","Type":"ContainerDied","Data":"5ced037d64c29fbb7181325f346861b098d7d2cc8331c9c3ccbaa1224f983681"} Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.191557 4745 scope.go:117] "RemoveContainer" containerID="5ced037d64c29fbb7181325f346861b098d7d2cc8331c9c3ccbaa1224f983681" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.195087 4745 generic.go:334] "Generic (PLEG): container finished" 
podID="a97f4588-f967-49e3-afcd-7c9a950b00ec" containerID="412f3e67a029261dcc7f9cc94e413b1f49a29610da29af689e52eaaeed8b999a" exitCode=0 Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.195181 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" event={"ID":"a97f4588-f967-49e3-afcd-7c9a950b00ec","Type":"ContainerDied","Data":"412f3e67a029261dcc7f9cc94e413b1f49a29610da29af689e52eaaeed8b999a"} Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.196160 4745 scope.go:117] "RemoveContainer" containerID="412f3e67a029261dcc7f9cc94e413b1f49a29610da29af689e52eaaeed8b999a" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.207521 4745 generic.go:334] "Generic (PLEG): container finished" podID="ae8ae814-8c22-4d63-9907-a4eff4a5f600" containerID="19d83da768704d1f7cedf27ad7c9c65a3a50d78ed721332d63034ea8b0d95444" exitCode=0 Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.207651 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" event={"ID":"ae8ae814-8c22-4d63-9907-a4eff4a5f600","Type":"ContainerDied","Data":"19d83da768704d1f7cedf27ad7c9c65a3a50d78ed721332d63034ea8b0d95444"} Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.208454 4745 scope.go:117] "RemoveContainer" containerID="19d83da768704d1f7cedf27ad7c9c65a3a50d78ed721332d63034ea8b0d95444" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.213747 4745 generic.go:334] "Generic (PLEG): container finished" podID="c2126c0f-717c-4df9-9009-8248c9cd99c4" containerID="1b94136cb49005c78ddbea4a41b41cc6148244a35d21ef150730acc5ceebcffc" exitCode=0 Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.214095 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" event={"ID":"c2126c0f-717c-4df9-9009-8248c9cd99c4","Type":"ContainerDied","Data":"1b94136cb49005c78ddbea4a41b41cc6148244a35d21ef150730acc5ceebcffc"} Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.215371 4745 scope.go:117] "RemoveContainer" containerID="1b94136cb49005c78ddbea4a41b41cc6148244a35d21ef150730acc5ceebcffc" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.226583 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" event={"ID":"28006f01-5d82-4b3c-8a5c-f77fa8f24081","Type":"ContainerStarted","Data":"fd73832f99a46f43b13303ad5e28640015ce37af5a1a5f84d132cacc23136248"} Dec 08 00:36:18 crc kubenswrapper[4745]: E1208 00:36:18.232283 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" podUID="28006f01-5d82-4b3c-8a5c-f77fa8f24081" Dec 08 00:36:18 crc kubenswrapper[4745]: E1208 00:36:18.541635 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" podUID="c2126c0f-717c-4df9-9009-8248c9cd99c4" Dec 08 00:36:18 crc kubenswrapper[4745]: E1208 00:36:18.546385 4745 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" podUID="f0030446-12f9-44fe-a42f-b8645bc0a9e2" Dec 08 00:36:18 crc kubenswrapper[4745]: E1208 00:36:18.547381 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" podUID="ae8ae814-8c22-4d63-9907-a4eff4a5f600" Dec 08 00:36:18 crc kubenswrapper[4745]: E1208 00:36:18.568373 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" podUID="a97f4588-f967-49e3-afcd-7c9a950b00ec" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.712028 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-n42pf"] Dec 08 00:36:18 crc kubenswrapper[4745]: E1208 00:36:18.712339 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b5687a7-1989-4d78-8106-1049126d45ff" containerName="default-interconnect" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.712354 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b5687a7-1989-4d78-8106-1049126d45ff" containerName="default-interconnect" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.712491 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b5687a7-1989-4d78-8106-1049126d45ff" containerName="default-interconnect" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.713307 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.716229 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-interconnect-sasl-config" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.716317 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-dockercfg-zkcw9" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.716554 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-ca" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.718879 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-ca" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.719262 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-users" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.719379 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-credentials" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.719962 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-credentials" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.722576 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-n42pf"] Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.790729 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/3b8650ca-762b-4a67-a77a-9de3c1fcde65-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.790797 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/3b8650ca-762b-4a67-a77a-9de3c1fcde65-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.790838 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/3b8650ca-762b-4a67-a77a-9de3c1fcde65-sasl-config\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.790878 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/3b8650ca-762b-4a67-a77a-9de3c1fcde65-sasl-users\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.791393 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/3b8650ca-762b-4a67-a77a-9de3c1fcde65-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.791418 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gn8ws\" (UniqueName: \"kubernetes.io/projected/3b8650ca-762b-4a67-a77a-9de3c1fcde65-kube-api-access-gn8ws\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.791448 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/3b8650ca-762b-4a67-a77a-9de3c1fcde65-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.892668 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/3b8650ca-762b-4a67-a77a-9de3c1fcde65-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.892717 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gn8ws\" (UniqueName: \"kubernetes.io/projected/3b8650ca-762b-4a67-a77a-9de3c1fcde65-kube-api-access-gn8ws\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.892748 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/3b8650ca-762b-4a67-a77a-9de3c1fcde65-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.892843 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/3b8650ca-762b-4a67-a77a-9de3c1fcde65-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.892878 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/3b8650ca-762b-4a67-a77a-9de3c1fcde65-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 
crc kubenswrapper[4745]: I1208 00:36:18.893014 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/3b8650ca-762b-4a67-a77a-9de3c1fcde65-sasl-config\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.893049 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/3b8650ca-762b-4a67-a77a-9de3c1fcde65-sasl-users\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.894366 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b5687a7-1989-4d78-8106-1049126d45ff" path="/var/lib/kubelet/pods/0b5687a7-1989-4d78-8106-1049126d45ff/volumes" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.894577 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/3b8650ca-762b-4a67-a77a-9de3c1fcde65-sasl-config\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.900947 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/3b8650ca-762b-4a67-a77a-9de3c1fcde65-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.901537 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/3b8650ca-762b-4a67-a77a-9de3c1fcde65-sasl-users\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.903070 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/3b8650ca-762b-4a67-a77a-9de3c1fcde65-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.903247 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/3b8650ca-762b-4a67-a77a-9de3c1fcde65-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.909766 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/3b8650ca-762b-4a67-a77a-9de3c1fcde65-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: 
\"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:18 crc kubenswrapper[4745]: I1208 00:36:18.930542 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gn8ws\" (UniqueName: \"kubernetes.io/projected/3b8650ca-762b-4a67-a77a-9de3c1fcde65-kube-api-access-gn8ws\") pod \"default-interconnect-68864d46cb-n42pf\" (UID: \"3b8650ca-762b-4a67-a77a-9de3c1fcde65\") " pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:19 crc kubenswrapper[4745]: I1208 00:36:19.033482 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-n42pf" Dec 08 00:36:19 crc kubenswrapper[4745]: I1208 00:36:19.236448 4745 generic.go:334] "Generic (PLEG): container finished" podID="28006f01-5d82-4b3c-8a5c-f77fa8f24081" containerID="fd73832f99a46f43b13303ad5e28640015ce37af5a1a5f84d132cacc23136248" exitCode=0 Dec 08 00:36:19 crc kubenswrapper[4745]: I1208 00:36:19.236719 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" event={"ID":"28006f01-5d82-4b3c-8a5c-f77fa8f24081","Type":"ContainerDied","Data":"fd73832f99a46f43b13303ad5e28640015ce37af5a1a5f84d132cacc23136248"} Dec 08 00:36:19 crc kubenswrapper[4745]: I1208 00:36:19.237063 4745 scope.go:117] "RemoveContainer" containerID="917e6371e6f7c132a7450f5695591ddc0af42976d8ab53933b032c64152e52d2" Dec 08 00:36:19 crc kubenswrapper[4745]: I1208 00:36:19.237399 4745 scope.go:117] "RemoveContainer" containerID="fd73832f99a46f43b13303ad5e28640015ce37af5a1a5f84d132cacc23136248" Dec 08 00:36:19 crc kubenswrapper[4745]: E1208 00:36:19.241861 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6_service-telemetry(28006f01-5d82-4b3c-8a5c-f77fa8f24081)\", failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"]" pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" podUID="28006f01-5d82-4b3c-8a5c-f77fa8f24081" Dec 08 00:36:19 crc kubenswrapper[4745]: I1208 00:36:19.245331 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" event={"ID":"f0030446-12f9-44fe-a42f-b8645bc0a9e2","Type":"ContainerStarted","Data":"b4f7d7591cd84b31befa50f12fb19203659ac28c88b7da2252263d8de1c313f1"} Dec 08 00:36:19 crc kubenswrapper[4745]: E1208 00:36:19.251455 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" podUID="f0030446-12f9-44fe-a42f-b8645bc0a9e2" Dec 08 00:36:19 crc kubenswrapper[4745]: I1208 00:36:19.262083 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" event={"ID":"a97f4588-f967-49e3-afcd-7c9a950b00ec","Type":"ContainerStarted","Data":"cffbcead4d2cd1351e7c46e427e3527b6d357e4a9ed70188273a1fedcf601a5f"} Dec 08 00:36:19 crc kubenswrapper[4745]: E1208 
00:36:19.264576 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" podUID="a97f4588-f967-49e3-afcd-7c9a950b00ec" Dec 08 00:36:19 crc kubenswrapper[4745]: I1208 00:36:19.278159 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" event={"ID":"ae8ae814-8c22-4d63-9907-a4eff4a5f600","Type":"ContainerStarted","Data":"5d63a9528e880843a28bd16be3c45dd3d502b0481fb70eaf271fa7db1bf022d2"} Dec 08 00:36:19 crc kubenswrapper[4745]: E1208 00:36:19.280743 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" podUID="ae8ae814-8c22-4d63-9907-a4eff4a5f600" Dec 08 00:36:19 crc kubenswrapper[4745]: I1208 00:36:19.288274 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" event={"ID":"c2126c0f-717c-4df9-9009-8248c9cd99c4","Type":"ContainerStarted","Data":"4106181b0d6914b910a7dd372ae510699e757aa5f3e9a574207c756971bcd432"} Dec 08 00:36:19 crc kubenswrapper[4745]: E1208 00:36:19.289766 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" podUID="c2126c0f-717c-4df9-9009-8248c9cd99c4" Dec 08 00:36:19 crc kubenswrapper[4745]: I1208 00:36:19.484372 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-n42pf"] Dec 08 00:36:19 crc kubenswrapper[4745]: W1208 00:36:19.491389 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b8650ca_762b_4a67_a77a_9de3c1fcde65.slice/crio-7e0fb45d0fc2e5b1c47fc5603274d0a5aab8ca012ba60d6f428edf4d544dccec WatchSource:0}: Error finding container 7e0fb45d0fc2e5b1c47fc5603274d0a5aab8ca012ba60d6f428edf4d544dccec: Status 404 returned error can't find the container with id 7e0fb45d0fc2e5b1c47fc5603274d0a5aab8ca012ba60d6f428edf4d544dccec Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.301770 4745 generic.go:334] "Generic (PLEG): container finished" podID="ae8ae814-8c22-4d63-9907-a4eff4a5f600" containerID="5d63a9528e880843a28bd16be3c45dd3d502b0481fb70eaf271fa7db1bf022d2" exitCode=0 Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.301867 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" event={"ID":"ae8ae814-8c22-4d63-9907-a4eff4a5f600","Type":"ContainerDied","Data":"5d63a9528e880843a28bd16be3c45dd3d502b0481fb70eaf271fa7db1bf022d2"} Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.302308 4745 scope.go:117] "RemoveContainer" containerID="19d83da768704d1f7cedf27ad7c9c65a3a50d78ed721332d63034ea8b0d95444" Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.303025 4745 scope.go:117] 
"RemoveContainer" containerID="5d63a9528e880843a28bd16be3c45dd3d502b0481fb70eaf271fa7db1bf022d2" Dec 08 00:36:20 crc kubenswrapper[4745]: E1208 00:36:20.304616 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f_service-telemetry(ae8ae814-8c22-4d63-9907-a4eff4a5f600)\", failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"]" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" podUID="ae8ae814-8c22-4d63-9907-a4eff4a5f600" Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.321484 4745 generic.go:334] "Generic (PLEG): container finished" podID="c2126c0f-717c-4df9-9009-8248c9cd99c4" containerID="4106181b0d6914b910a7dd372ae510699e757aa5f3e9a574207c756971bcd432" exitCode=0 Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.321553 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" event={"ID":"c2126c0f-717c-4df9-9009-8248c9cd99c4","Type":"ContainerDied","Data":"4106181b0d6914b910a7dd372ae510699e757aa5f3e9a574207c756971bcd432"} Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.322154 4745 scope.go:117] "RemoveContainer" containerID="4106181b0d6914b910a7dd372ae510699e757aa5f3e9a574207c756971bcd432" Dec 08 00:36:20 crc kubenswrapper[4745]: E1208 00:36:20.324512 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4_service-telemetry(c2126c0f-717c-4df9-9009-8248c9cd99c4)\", failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"]" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" podUID="c2126c0f-717c-4df9-9009-8248c9cd99c4" Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.328292 4745 scope.go:117] "RemoveContainer" containerID="fd73832f99a46f43b13303ad5e28640015ce37af5a1a5f84d132cacc23136248" Dec 08 00:36:20 crc kubenswrapper[4745]: E1208 00:36:20.329619 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6_service-telemetry(28006f01-5d82-4b3c-8a5c-f77fa8f24081)\", failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"]" pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" podUID="28006f01-5d82-4b3c-8a5c-f77fa8f24081" Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.337019 4745 generic.go:334] "Generic (PLEG): container finished" podID="f0030446-12f9-44fe-a42f-b8645bc0a9e2" containerID="b4f7d7591cd84b31befa50f12fb19203659ac28c88b7da2252263d8de1c313f1" exitCode=0 Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.337086 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" 
event={"ID":"f0030446-12f9-44fe-a42f-b8645bc0a9e2","Type":"ContainerDied","Data":"b4f7d7591cd84b31befa50f12fb19203659ac28c88b7da2252263d8de1c313f1"} Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.338396 4745 scope.go:117] "RemoveContainer" containerID="b4f7d7591cd84b31befa50f12fb19203659ac28c88b7da2252263d8de1c313f1" Dec 08 00:36:20 crc kubenswrapper[4745]: E1208 00:36:20.340823 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v_service-telemetry(f0030446-12f9-44fe-a42f-b8645bc0a9e2)\", failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"]" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" podUID="f0030446-12f9-44fe-a42f-b8645bc0a9e2" Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.341833 4745 generic.go:334] "Generic (PLEG): container finished" podID="a97f4588-f967-49e3-afcd-7c9a950b00ec" containerID="cffbcead4d2cd1351e7c46e427e3527b6d357e4a9ed70188273a1fedcf601a5f" exitCode=0 Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.342150 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" event={"ID":"a97f4588-f967-49e3-afcd-7c9a950b00ec","Type":"ContainerDied","Data":"cffbcead4d2cd1351e7c46e427e3527b6d357e4a9ed70188273a1fedcf601a5f"} Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.342861 4745 scope.go:117] "RemoveContainer" containerID="cffbcead4d2cd1351e7c46e427e3527b6d357e4a9ed70188273a1fedcf601a5f" Dec 08 00:36:20 crc kubenswrapper[4745]: E1208 00:36:20.344396 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl_service-telemetry(a97f4588-f967-49e3-afcd-7c9a950b00ec)\", failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest\\\"\"]" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" podUID="a97f4588-f967-49e3-afcd-7c9a950b00ec" Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.348536 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-n42pf" event={"ID":"3b8650ca-762b-4a67-a77a-9de3c1fcde65","Type":"ContainerStarted","Data":"7d992575a6f4b66ef84b63232c82048edf5e9843827ed0b2929f79de70677e77"} Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.348580 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-n42pf" event={"ID":"3b8650ca-762b-4a67-a77a-9de3c1fcde65","Type":"ContainerStarted","Data":"7e0fb45d0fc2e5b1c47fc5603274d0a5aab8ca012ba60d6f428edf4d544dccec"} Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.381483 4745 scope.go:117] "RemoveContainer" containerID="1b94136cb49005c78ddbea4a41b41cc6148244a35d21ef150730acc5ceebcffc" Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.438656 4745 scope.go:117] "RemoveContainer" containerID="5ced037d64c29fbb7181325f346861b098d7d2cc8331c9c3ccbaa1224f983681" Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.474149 4745 
scope.go:117] "RemoveContainer" containerID="412f3e67a029261dcc7f9cc94e413b1f49a29610da29af689e52eaaeed8b999a" Dec 08 00:36:20 crc kubenswrapper[4745]: I1208 00:36:20.474597 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-n42pf" podStartSLOduration=4.474573415 podStartE2EDuration="4.474573415s" podCreationTimestamp="2025-12-08 00:36:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-08 00:36:20.466148192 +0000 UTC m=+1735.895354502" watchObservedRunningTime="2025-12-08 00:36:20.474573415 +0000 UTC m=+1735.903779735" Dec 08 00:36:30 crc kubenswrapper[4745]: I1208 00:36:30.883041 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:36:30 crc kubenswrapper[4745]: E1208 00:36:30.886150 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:36:32 crc kubenswrapper[4745]: I1208 00:36:32.882667 4745 scope.go:117] "RemoveContainer" containerID="b4f7d7591cd84b31befa50f12fb19203659ac28c88b7da2252263d8de1c313f1" Dec 08 00:36:32 crc kubenswrapper[4745]: I1208 00:36:32.883134 4745 scope.go:117] "RemoveContainer" containerID="fd73832f99a46f43b13303ad5e28640015ce37af5a1a5f84d132cacc23136248" Dec 08 00:36:32 crc kubenswrapper[4745]: I1208 00:36:32.883307 4745 scope.go:117] "RemoveContainer" containerID="cffbcead4d2cd1351e7c46e427e3527b6d357e4a9ed70188273a1fedcf601a5f" Dec 08 00:36:33 crc kubenswrapper[4745]: I1208 00:36:33.460535 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" event={"ID":"f0030446-12f9-44fe-a42f-b8645bc0a9e2","Type":"ContainerStarted","Data":"6671e68b371a705170f4b79475fa1861293fbdc17cbdf693fa297b0aabd24108"} Dec 08 00:36:33 crc kubenswrapper[4745]: I1208 00:36:33.462517 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" event={"ID":"a97f4588-f967-49e3-afcd-7c9a950b00ec","Type":"ContainerStarted","Data":"60b0b862505703914a931f531aaa8ad2dd1486a669ff8cd3f2c2bfb9a8340a14"} Dec 08 00:36:33 crc kubenswrapper[4745]: I1208 00:36:33.464016 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" event={"ID":"28006f01-5d82-4b3c-8a5c-f77fa8f24081","Type":"ContainerStarted","Data":"81e910209e59ab5a95bc7c062745e5a6653d622d5fc01490683b768918f6a752"} Dec 08 00:36:33 crc kubenswrapper[4745]: I1208 00:36:33.882252 4745 scope.go:117] "RemoveContainer" containerID="4106181b0d6914b910a7dd372ae510699e757aa5f3e9a574207c756971bcd432" Dec 08 00:36:34 crc kubenswrapper[4745]: I1208 00:36:34.476819 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" event={"ID":"f0030446-12f9-44fe-a42f-b8645bc0a9e2","Type":"ContainerStarted","Data":"6d7296b9116dd1aa25703f4ef650398d6ff36077a0ab91d5bd128a4142c75250"} Dec 08 00:36:34 crc kubenswrapper[4745]: I1208 00:36:34.480991 4745 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" event={"ID":"a97f4588-f967-49e3-afcd-7c9a950b00ec","Type":"ContainerStarted","Data":"67e940b77887632d9162e6143322654b0795d55077b88552b5bbfbd5622af1f6"} Dec 08 00:36:34 crc kubenswrapper[4745]: I1208 00:36:34.485693 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" event={"ID":"c2126c0f-717c-4df9-9009-8248c9cd99c4","Type":"ContainerStarted","Data":"df0d02b61b10b865cb1ecf75f54d5dfea3fe6318032055aeb793742440f0e3a9"} Dec 08 00:36:34 crc kubenswrapper[4745]: I1208 00:36:34.485743 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" event={"ID":"c2126c0f-717c-4df9-9009-8248c9cd99c4","Type":"ContainerStarted","Data":"58b169808d8b2b06c758e209c4245be31d2565d4a208272eae73b7fcd2e52ebb"} Dec 08 00:36:34 crc kubenswrapper[4745]: I1208 00:36:34.489034 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" event={"ID":"28006f01-5d82-4b3c-8a5c-f77fa8f24081","Type":"ContainerStarted","Data":"969f945050cf7ec58c09bde51a797ab351044613deee1bc15c8520d3ccade85f"} Dec 08 00:36:34 crc kubenswrapper[4745]: I1208 00:36:34.516706 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v" podStartSLOduration=4.298764598 podStartE2EDuration="42.51668564s" podCreationTimestamp="2025-12-08 00:35:52 +0000 UTC" firstStartedPulling="2025-12-08 00:35:55.174569164 +0000 UTC m=+1710.603775474" lastFinishedPulling="2025-12-08 00:36:33.392490216 +0000 UTC m=+1748.821696516" observedRunningTime="2025-12-08 00:36:34.50609313 +0000 UTC m=+1749.935299480" watchObservedRunningTime="2025-12-08 00:36:34.51668564 +0000 UTC m=+1749.945891940" Dec 08 00:36:34 crc kubenswrapper[4745]: I1208 00:36:34.552147 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4" podStartSLOduration=7.290815834 podStartE2EDuration="48.552094146s" podCreationTimestamp="2025-12-08 00:35:46 +0000 UTC" firstStartedPulling="2025-12-08 00:35:52.974179425 +0000 UTC m=+1708.403385735" lastFinishedPulling="2025-12-08 00:36:34.235457737 +0000 UTC m=+1749.664664047" observedRunningTime="2025-12-08 00:36:34.542423781 +0000 UTC m=+1749.971630091" watchObservedRunningTime="2025-12-08 00:36:34.552094146 +0000 UTC m=+1749.981300466" Dec 08 00:36:34 crc kubenswrapper[4745]: I1208 00:36:34.575899 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6" podStartSLOduration=2.866214141 podStartE2EDuration="34.575871375s" podCreationTimestamp="2025-12-08 00:36:00 +0000 UTC" firstStartedPulling="2025-12-08 00:36:01.683556182 +0000 UTC m=+1717.112762482" lastFinishedPulling="2025-12-08 00:36:33.393213396 +0000 UTC m=+1748.822419716" observedRunningTime="2025-12-08 00:36:34.569173278 +0000 UTC m=+1749.998379578" watchObservedRunningTime="2025-12-08 00:36:34.575871375 +0000 UTC m=+1750.005077715" Dec 08 00:36:34 crc kubenswrapper[4745]: I1208 00:36:34.608531 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl" 
podStartSLOduration=6.33298715 podStartE2EDuration="46.608511617s" podCreationTimestamp="2025-12-08 00:35:48 +0000 UTC" firstStartedPulling="2025-12-08 00:35:53.132340576 +0000 UTC m=+1708.561546876" lastFinishedPulling="2025-12-08 00:36:33.407865033 +0000 UTC m=+1748.837071343" observedRunningTime="2025-12-08 00:36:34.602046047 +0000 UTC m=+1750.031252377" watchObservedRunningTime="2025-12-08 00:36:34.608511617 +0000 UTC m=+1750.037717917" Dec 08 00:36:34 crc kubenswrapper[4745]: I1208 00:36:34.888364 4745 scope.go:117] "RemoveContainer" containerID="5d63a9528e880843a28bd16be3c45dd3d502b0481fb70eaf271fa7db1bf022d2" Dec 08 00:36:35 crc kubenswrapper[4745]: I1208 00:36:35.496423 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" event={"ID":"ae8ae814-8c22-4d63-9907-a4eff4a5f600","Type":"ContainerStarted","Data":"60c55c7750c1168efd8dbacb2db718efd3891f226fcd4f6781adfd1ed9952178"} Dec 08 00:36:35 crc kubenswrapper[4745]: I1208 00:36:35.496484 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" event={"ID":"ae8ae814-8c22-4d63-9907-a4eff4a5f600","Type":"ContainerStarted","Data":"636d9d4f5c7f5ac82d7d7b93164027384f7f8c658e402cd4e907070bd856f5ef"} Dec 08 00:36:35 crc kubenswrapper[4745]: I1208 00:36:35.522561 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f" podStartSLOduration=1.582265486 podStartE2EDuration="34.522542395s" podCreationTimestamp="2025-12-08 00:36:01 +0000 UTC" firstStartedPulling="2025-12-08 00:36:02.275716844 +0000 UTC m=+1717.704923144" lastFinishedPulling="2025-12-08 00:36:35.215993753 +0000 UTC m=+1750.645200053" observedRunningTime="2025-12-08 00:36:35.516572318 +0000 UTC m=+1750.945778618" watchObservedRunningTime="2025-12-08 00:36:35.522542395 +0000 UTC m=+1750.951748695" Dec 08 00:36:37 crc kubenswrapper[4745]: I1208 00:36:37.874866 4745 scope.go:117] "RemoveContainer" containerID="2cf67aeb4fc68d010e7c20138eb98b49c58dcc9ab364d5af0678b1b13943604a" Dec 08 00:36:37 crc kubenswrapper[4745]: I1208 00:36:37.904973 4745 scope.go:117] "RemoveContainer" containerID="211d1e50d86a996bdfec74a91c56e083deee6a38722fc1ed2f6912626e90d924" Dec 08 00:36:41 crc kubenswrapper[4745]: I1208 00:36:41.882879 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:36:41 crc kubenswrapper[4745]: E1208 00:36:41.883512 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:36:48 crc kubenswrapper[4745]: I1208 00:36:48.013532 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/qdr-test"] Dec 08 00:36:48 crc kubenswrapper[4745]: I1208 00:36:48.014732 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/qdr-test" Dec 08 00:36:48 crc kubenswrapper[4745]: I1208 00:36:48.018452 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"qdr-test-config" Dec 08 00:36:48 crc kubenswrapper[4745]: I1208 00:36:48.019524 4745 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-selfsigned" Dec 08 00:36:48 crc kubenswrapper[4745]: I1208 00:36:48.031336 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Dec 08 00:36:48 crc kubenswrapper[4745]: I1208 00:36:48.181649 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9\") " pod="service-telemetry/qdr-test" Dec 08 00:36:48 crc kubenswrapper[4745]: I1208 00:36:48.181754 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9-qdr-test-config\") pod \"qdr-test\" (UID: \"5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9\") " pod="service-telemetry/qdr-test" Dec 08 00:36:48 crc kubenswrapper[4745]: I1208 00:36:48.181771 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9thk6\" (UniqueName: \"kubernetes.io/projected/5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9-kube-api-access-9thk6\") pod \"qdr-test\" (UID: \"5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9\") " pod="service-telemetry/qdr-test" Dec 08 00:36:48 crc kubenswrapper[4745]: I1208 00:36:48.283128 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9\") " pod="service-telemetry/qdr-test" Dec 08 00:36:48 crc kubenswrapper[4745]: I1208 00:36:48.283247 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9-qdr-test-config\") pod \"qdr-test\" (UID: \"5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9\") " pod="service-telemetry/qdr-test" Dec 08 00:36:48 crc kubenswrapper[4745]: I1208 00:36:48.283278 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9thk6\" (UniqueName: \"kubernetes.io/projected/5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9-kube-api-access-9thk6\") pod \"qdr-test\" (UID: \"5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9\") " pod="service-telemetry/qdr-test" Dec 08 00:36:48 crc kubenswrapper[4745]: I1208 00:36:48.284915 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9-qdr-test-config\") pod \"qdr-test\" (UID: \"5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9\") " pod="service-telemetry/qdr-test" Dec 08 00:36:48 crc kubenswrapper[4745]: I1208 00:36:48.291784 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: 
\"5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9\") " pod="service-telemetry/qdr-test" Dec 08 00:36:48 crc kubenswrapper[4745]: I1208 00:36:48.300190 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9thk6\" (UniqueName: \"kubernetes.io/projected/5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9-kube-api-access-9thk6\") pod \"qdr-test\" (UID: \"5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9\") " pod="service-telemetry/qdr-test" Dec 08 00:36:48 crc kubenswrapper[4745]: I1208 00:36:48.355782 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/qdr-test" Dec 08 00:36:48 crc kubenswrapper[4745]: I1208 00:36:48.800633 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Dec 08 00:36:48 crc kubenswrapper[4745]: W1208 00:36:48.802539 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ea48e07_1f9d_4c42_b754_cdfbd8f1caa9.slice/crio-89430e57bc186ec903321db69a6f5b2f72f17fdbde6772a6317df40e686ffcbc WatchSource:0}: Error finding container 89430e57bc186ec903321db69a6f5b2f72f17fdbde6772a6317df40e686ffcbc: Status 404 returned error can't find the container with id 89430e57bc186ec903321db69a6f5b2f72f17fdbde6772a6317df40e686ffcbc Dec 08 00:36:49 crc kubenswrapper[4745]: I1208 00:36:49.624313 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9","Type":"ContainerStarted","Data":"89430e57bc186ec903321db69a6f5b2f72f17fdbde6772a6317df40e686ffcbc"} Dec 08 00:36:55 crc kubenswrapper[4745]: I1208 00:36:55.884473 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:36:55 crc kubenswrapper[4745]: E1208 00:36:55.885763 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:36:57 crc kubenswrapper[4745]: I1208 00:36:57.690488 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9","Type":"ContainerStarted","Data":"d059d9a10327baee26fa06d3fbf718fcbb0740c9d92bc45d753d8c0a54ad3c07"} Dec 08 00:36:57 crc kubenswrapper[4745]: I1208 00:36:57.713919 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/qdr-test" podStartSLOduration=2.823369891 podStartE2EDuration="10.713895192s" podCreationTimestamp="2025-12-08 00:36:47 +0000 UTC" firstStartedPulling="2025-12-08 00:36:48.806694288 +0000 UTC m=+1764.235900618" lastFinishedPulling="2025-12-08 00:36:56.697219599 +0000 UTC m=+1772.126425919" observedRunningTime="2025-12-08 00:36:57.706321451 +0000 UTC m=+1773.135527761" watchObservedRunningTime="2025-12-08 00:36:57.713895192 +0000 UTC m=+1773.143101502" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.046144 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/stf-smoketest-smoke1-2fjqk"] Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.048144 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.053939 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-2fjqk"] Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.055418 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-entrypoint-script" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.055865 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-config" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.056265 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-entrypoint-script" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.056483 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-publisher" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.056325 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-sensubility-config" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.058006 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-healthcheck-log" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.170294 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-sensubility-config\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.170375 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-healthcheck-log\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.170543 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.170619 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-ceilometer-publisher\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.170711 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc 
kubenswrapper[4745]: I1208 00:36:58.170834 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-collectd-config\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.170868 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slkhc\" (UniqueName: \"kubernetes.io/projected/ee3c2950-dd8e-42fa-a22b-b03416435a8b-kube-api-access-slkhc\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.272532 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-collectd-config\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.272582 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slkhc\" (UniqueName: \"kubernetes.io/projected/ee3c2950-dd8e-42fa-a22b-b03416435a8b-kube-api-access-slkhc\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.272622 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-sensubility-config\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.272656 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-healthcheck-log\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.272681 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.272708 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-ceilometer-publisher\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.272736 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-collectd-entrypoint-script\") pod 
\"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.273735 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-collectd-config\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.273783 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.274417 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-healthcheck-log\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.274553 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.275051 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-sensubility-config\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.275216 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-ceilometer-publisher\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.297104 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slkhc\" (UniqueName: \"kubernetes.io/projected/ee3c2950-dd8e-42fa-a22b-b03416435a8b-kube-api-access-slkhc\") pod \"stf-smoketest-smoke1-2fjqk\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.373167 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.532468 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/curl"] Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.533459 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.542722 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.610590 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-2fjqk"] Dec 08 00:36:58 crc kubenswrapper[4745]: W1208 00:36:58.613765 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee3c2950_dd8e_42fa_a22b_b03416435a8b.slice/crio-0ef8bd4e884814f639ab0419cbea6845cd4f07164949225fdbdb1288af6beb7c WatchSource:0}: Error finding container 0ef8bd4e884814f639ab0419cbea6845cd4f07164949225fdbdb1288af6beb7c: Status 404 returned error can't find the container with id 0ef8bd4e884814f639ab0419cbea6845cd4f07164949225fdbdb1288af6beb7c Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.677455 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d84mb\" (UniqueName: \"kubernetes.io/projected/ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d-kube-api-access-d84mb\") pod \"curl\" (UID: \"ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d\") " pod="service-telemetry/curl" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.698593 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-2fjqk" event={"ID":"ee3c2950-dd8e-42fa-a22b-b03416435a8b","Type":"ContainerStarted","Data":"0ef8bd4e884814f639ab0419cbea6845cd4f07164949225fdbdb1288af6beb7c"} Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.779181 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d84mb\" (UniqueName: \"kubernetes.io/projected/ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d-kube-api-access-d84mb\") pod \"curl\" (UID: \"ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d\") " pod="service-telemetry/curl" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.812550 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d84mb\" (UniqueName: \"kubernetes.io/projected/ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d-kube-api-access-d84mb\") pod \"curl\" (UID: \"ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d\") " pod="service-telemetry/curl" Dec 08 00:36:58 crc kubenswrapper[4745]: I1208 00:36:58.858143 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Dec 08 00:36:59 crc kubenswrapper[4745]: I1208 00:36:59.067357 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Dec 08 00:36:59 crc kubenswrapper[4745]: I1208 00:36:59.705916 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d","Type":"ContainerStarted","Data":"fab9467dd33061b9ae90ed9d312042a3a9b37a22997978cc2585377199f02b2b"} Dec 08 00:37:01 crc kubenswrapper[4745]: I1208 00:37:01.724143 4745 generic.go:334] "Generic (PLEG): container finished" podID="ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d" containerID="ef1bdea823cfd17d9022b57968472f25bdee4f6f778fa1bf5b4101efae0b7f79" exitCode=0 Dec 08 00:37:01 crc kubenswrapper[4745]: I1208 00:37:01.724217 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d","Type":"ContainerDied","Data":"ef1bdea823cfd17d9022b57968472f25bdee4f6f778fa1bf5b4101efae0b7f79"} Dec 08 00:37:06 crc kubenswrapper[4745]: I1208 00:37:06.886904 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:37:06 crc kubenswrapper[4745]: E1208 00:37:06.887567 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:37:10 crc kubenswrapper[4745]: E1208 00:37:10.670406 4745 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/tripleomastercentos9/openstack-collectd:current-tripleo" Dec 08 00:37:10 crc kubenswrapper[4745]: E1208 00:37:10.671873 4745 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:smoketest-collectd,Image:quay.io/tripleomastercentos9/openstack-collectd:current-tripleo,Command:[/smoketest_collectd_entrypoint.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CLOUDNAME,Value:smoke1,ValueFrom:nil,},EnvVar{Name:ELASTICSEARCH_AUTH_PASS,Value:vXeibLGcRByNWS9DBkRYgLly,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_AUTH_TOKEN,Value:eyJhbGciOiJSUzI1NiIsImtpZCI6InF6SnFxNFFjbVk5VmJQZ2dNMmUxdHFmTlJlVWx4UDhSTlhIamV3RUx4WU0ifQ.eyJhdWQiOlsiaHR0cHM6Ly9rdWJlcm5ldGVzLmRlZmF1bHQuc3ZjIl0sImV4cCI6MTc2NTE1NzgwMiwiaWF0IjoxNzY1MTU0MjAyLCJpc3MiOiJodHRwczovL2t1YmVybmV0ZXMuZGVmYXVsdC5zdmMiLCJqdGkiOiJjOGU5ZWUwMS1lOTJjLTQwMjEtOWQ2Ni01MzA0OTQzYzZjZjYiLCJrdWJlcm5ldGVzLmlvIjp7Im5hbWVzcGFjZSI6InNlcnZpY2UtdGVsZW1ldHJ5Iiwic2VydmljZWFjY291bnQiOnsibmFtZSI6InN0Zi1wcm9tZXRoZXVzLXJlYWRlciIsInVpZCI6IjQwNjEyNzBiLTQyZGEtNDllYy1hNmRhLWIwZDkxYmJkMTJhNyJ9fSwibmJmIjoxNzY1MTU0MjAyLCJzdWIiOiJzeXN0ZW06c2VydmljZWFjY291bnQ6c2VydmljZS10ZWxlbWV0cnk6c3RmLXByb21ldGhldXMtcmVhZGVyIn0.mL-roUdDX1pOXH_HtXoZlzTu2sc1kRvN9YL1POX6Hied7q4e92T_7cq-xMnV9a9-wgupODYIO12UHhv5uHVJArjImXw812bHhQoqPe2h7ExrxRMrIFFVNbc222i-wrUmHQUrjhPutjOA17zmhLZQcEDnrw8ldPmpXeIY1POKkRjvpS33inf4lKK4oz3Sv-B69mzT_Np7GSfs-JoEY1dcqnVCfuDC252-frsd0DAdn7d8ny2JKLeJV7N7zCMpxVSh2KawNIQqHrntM8TjHT4yVuheNwsWD_-k1_-dbJKqhZAwMDe9eYGXkUTEjtTLvGazL1XmZ3VJY2qAGU0qz03b1czeGYH0zGFvTwJ8fZFmnnB25kF_N6-jvojoyE9El1gRLc6URqEhJHZ2-mByUkRX5rz5r-vrlClvKW_3Qt78rt_jJg-I2ITCNxCmOdhZDsWoDZjxY-kRdI5efjF2_uSrEJtI9SMYw0zMOAs9oxLQeBZEIS7zkx88vl9Qb4cCstGvnCQzdxm7LKW24-kTcdB2OGOEd1nqIo8qo5-mo77h43CRG6El8frAgBgtRrHoAO20fAuCcRdloolsC84W-kkHOHGdETzXNau1p6RU7aidt5enCgZKLxPqzgF3o-pNy_xTSZlu8CSuy5v0p4V46sZIeunEHAok2mB6k6VnHfby1iM,ValueFrom:nil,},EnvVar{Name:OBSERVABILITY_STRATEGY,Value:<>,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:collectd-config,ReadOnly:false,MountPath:/etc/minimal-collectd.conf.template,SubPath:minimal-collectd.conf.template,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:sensubility-config,ReadOnly:false,MountPath:/etc/collectd-sensubility.conf,SubPath:collectd-sensubility.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:healthcheck-log,ReadOnly:false,MountPath:/healthcheck.log,SubPath:healthcheck.log,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:collectd-entrypoint-script,ReadOnly:false,MountPath:/smoketest_collectd_entrypoint.sh,SubPath:smoketest_collectd_entrypoint.sh,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-slkhc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod stf-smoketest-smoke1-2fjqk_service-telemetry(ee3c2950-dd8e-42fa-a22b-b03416435a8b): ErrImagePull: rpc 
error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 08 00:37:10 crc kubenswrapper[4745]: I1208 00:37:10.690962 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Dec 08 00:37:10 crc kubenswrapper[4745]: I1208 00:37:10.785610 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d","Type":"ContainerDied","Data":"fab9467dd33061b9ae90ed9d312042a3a9b37a22997978cc2585377199f02b2b"} Dec 08 00:37:10 crc kubenswrapper[4745]: I1208 00:37:10.785647 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fab9467dd33061b9ae90ed9d312042a3a9b37a22997978cc2585377199f02b2b" Dec 08 00:37:10 crc kubenswrapper[4745]: I1208 00:37:10.785660 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Dec 08 00:37:10 crc kubenswrapper[4745]: I1208 00:37:10.868569 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d84mb\" (UniqueName: \"kubernetes.io/projected/ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d-kube-api-access-d84mb\") pod \"ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d\" (UID: \"ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d\") " Dec 08 00:37:10 crc kubenswrapper[4745]: I1208 00:37:10.870163 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_curl_ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d/curl/0.log" Dec 08 00:37:10 crc kubenswrapper[4745]: I1208 00:37:10.875145 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d-kube-api-access-d84mb" (OuterVolumeSpecName: "kube-api-access-d84mb") pod "ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d" (UID: "ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d"). InnerVolumeSpecName "kube-api-access-d84mb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:37:10 crc kubenswrapper[4745]: I1208 00:37:10.970092 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d84mb\" (UniqueName: \"kubernetes.io/projected/ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d-kube-api-access-d84mb\") on node \"crc\" DevicePath \"\"" Dec 08 00:37:11 crc kubenswrapper[4745]: I1208 00:37:11.197116 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-6856cfb745-jwn74_9a588f99-61be-4bfc-b1b6-c444e06c2ada/prometheus-webhook-snmp/0.log" Dec 08 00:37:19 crc kubenswrapper[4745]: E1208 00:37:19.398323 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"smoketest-collectd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/stf-smoketest-smoke1-2fjqk" podUID="ee3c2950-dd8e-42fa-a22b-b03416435a8b" Dec 08 00:37:19 crc kubenswrapper[4745]: I1208 00:37:19.860789 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-2fjqk" event={"ID":"ee3c2950-dd8e-42fa-a22b-b03416435a8b","Type":"ContainerStarted","Data":"fd07280cae4faf153ce0222620958b3e5caa31828e5b81e24f25023ae6896d85"} Dec 08 00:37:19 crc kubenswrapper[4745]: E1208 00:37:19.863444 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"smoketest-collectd\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/tripleomastercentos9/openstack-collectd:current-tripleo\\\"\"" pod="service-telemetry/stf-smoketest-smoke1-2fjqk" podUID="ee3c2950-dd8e-42fa-a22b-b03416435a8b" Dec 08 00:37:20 crc kubenswrapper[4745]: I1208 00:37:20.883090 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:37:20 crc kubenswrapper[4745]: E1208 00:37:20.883349 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:37:21 crc kubenswrapper[4745]: I1208 00:37:21.876537 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-2fjqk" event={"ID":"ee3c2950-dd8e-42fa-a22b-b03416435a8b","Type":"ContainerStarted","Data":"f1fe7b7b7899757e0cd69c4885c45dc3c7337b0a346645659035f63e004af359"} Dec 08 00:37:21 crc kubenswrapper[4745]: I1208 00:37:21.899014 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/stf-smoketest-smoke1-2fjqk" podStartSLOduration=1.166477692 podStartE2EDuration="23.898997348s" podCreationTimestamp="2025-12-08 00:36:58 +0000 UTC" firstStartedPulling="2025-12-08 00:36:58.616324951 +0000 UTC m=+1774.045531271" lastFinishedPulling="2025-12-08 00:37:21.348844587 +0000 UTC m=+1796.778050927" observedRunningTime="2025-12-08 00:37:21.898049003 +0000 UTC m=+1797.327255313" watchObservedRunningTime="2025-12-08 00:37:21.898997348 +0000 UTC m=+1797.328203648" Dec 08 00:37:33 crc kubenswrapper[4745]: I1208 00:37:33.883195 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:37:33 crc kubenswrapper[4745]: E1208 00:37:33.884161 4745 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:37:37 crc kubenswrapper[4745]: I1208 00:37:37.963375 4745 scope.go:117] "RemoveContainer" containerID="bf4007db0f862a684c66b44d965ed67efd116568842790dd2ac76f5cfb1a0c00" Dec 08 00:37:38 crc kubenswrapper[4745]: I1208 00:37:38.003795 4745 scope.go:117] "RemoveContainer" containerID="68df80bad1ccea0dc5886a0dfd78203f52022c1103851b7c803e177409926d0f" Dec 08 00:37:41 crc kubenswrapper[4745]: I1208 00:37:41.409508 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-6856cfb745-jwn74_9a588f99-61be-4bfc-b1b6-c444e06c2ada/prometheus-webhook-snmp/0.log" Dec 08 00:37:48 crc kubenswrapper[4745]: I1208 00:37:48.882354 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:37:48 crc kubenswrapper[4745]: E1208 00:37:48.882972 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:37:51 crc kubenswrapper[4745]: I1208 00:37:51.142457 4745 generic.go:334] "Generic (PLEG): container finished" podID="ee3c2950-dd8e-42fa-a22b-b03416435a8b" containerID="fd07280cae4faf153ce0222620958b3e5caa31828e5b81e24f25023ae6896d85" exitCode=0 Dec 08 00:37:51 crc kubenswrapper[4745]: I1208 00:37:51.143194 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-2fjqk" event={"ID":"ee3c2950-dd8e-42fa-a22b-b03416435a8b","Type":"ContainerDied","Data":"fd07280cae4faf153ce0222620958b3e5caa31828e5b81e24f25023ae6896d85"} Dec 08 00:37:51 crc kubenswrapper[4745]: I1208 00:37:51.144609 4745 scope.go:117] "RemoveContainer" containerID="fd07280cae4faf153ce0222620958b3e5caa31828e5b81e24f25023ae6896d85" Dec 08 00:37:55 crc kubenswrapper[4745]: I1208 00:37:55.183716 4745 generic.go:334] "Generic (PLEG): container finished" podID="ee3c2950-dd8e-42fa-a22b-b03416435a8b" containerID="f1fe7b7b7899757e0cd69c4885c45dc3c7337b0a346645659035f63e004af359" exitCode=0 Dec 08 00:37:55 crc kubenswrapper[4745]: I1208 00:37:55.183825 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-2fjqk" event={"ID":"ee3c2950-dd8e-42fa-a22b-b03416435a8b","Type":"ContainerDied","Data":"f1fe7b7b7899757e0cd69c4885c45dc3c7337b0a346645659035f63e004af359"} Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.539411 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.650079 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-sensubility-config\") pod \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.650136 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-collectd-config\") pod \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.650179 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-ceilometer-publisher\") pod \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.650279 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-slkhc\" (UniqueName: \"kubernetes.io/projected/ee3c2950-dd8e-42fa-a22b-b03416435a8b-kube-api-access-slkhc\") pod \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.650320 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-collectd-entrypoint-script\") pod \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.650349 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-healthcheck-log\") pod \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.650386 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-ceilometer-entrypoint-script\") pod \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\" (UID: \"ee3c2950-dd8e-42fa-a22b-b03416435a8b\") " Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.664230 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee3c2950-dd8e-42fa-a22b-b03416435a8b-kube-api-access-slkhc" (OuterVolumeSpecName: "kube-api-access-slkhc") pod "ee3c2950-dd8e-42fa-a22b-b03416435a8b" (UID: "ee3c2950-dd8e-42fa-a22b-b03416435a8b"). InnerVolumeSpecName "kube-api-access-slkhc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.669476 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-ceilometer-entrypoint-script" (OuterVolumeSpecName: "ceilometer-entrypoint-script") pod "ee3c2950-dd8e-42fa-a22b-b03416435a8b" (UID: "ee3c2950-dd8e-42fa-a22b-b03416435a8b"). InnerVolumeSpecName "ceilometer-entrypoint-script". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.674230 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-healthcheck-log" (OuterVolumeSpecName: "healthcheck-log") pod "ee3c2950-dd8e-42fa-a22b-b03416435a8b" (UID: "ee3c2950-dd8e-42fa-a22b-b03416435a8b"). InnerVolumeSpecName "healthcheck-log". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.675530 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-ceilometer-publisher" (OuterVolumeSpecName: "ceilometer-publisher") pod "ee3c2950-dd8e-42fa-a22b-b03416435a8b" (UID: "ee3c2950-dd8e-42fa-a22b-b03416435a8b"). InnerVolumeSpecName "ceilometer-publisher". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.682312 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-collectd-config" (OuterVolumeSpecName: "collectd-config") pod "ee3c2950-dd8e-42fa-a22b-b03416435a8b" (UID: "ee3c2950-dd8e-42fa-a22b-b03416435a8b"). InnerVolumeSpecName "collectd-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.682868 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-collectd-entrypoint-script" (OuterVolumeSpecName: "collectd-entrypoint-script") pod "ee3c2950-dd8e-42fa-a22b-b03416435a8b" (UID: "ee3c2950-dd8e-42fa-a22b-b03416435a8b"). InnerVolumeSpecName "collectd-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.685705 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-sensubility-config" (OuterVolumeSpecName: "sensubility-config") pod "ee3c2950-dd8e-42fa-a22b-b03416435a8b" (UID: "ee3c2950-dd8e-42fa-a22b-b03416435a8b"). InnerVolumeSpecName "sensubility-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.754099 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-slkhc\" (UniqueName: \"kubernetes.io/projected/ee3c2950-dd8e-42fa-a22b-b03416435a8b-kube-api-access-slkhc\") on node \"crc\" DevicePath \"\"" Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.754156 4745 reconciler_common.go:293] "Volume detached for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-collectd-entrypoint-script\") on node \"crc\" DevicePath \"\"" Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.754179 4745 reconciler_common.go:293] "Volume detached for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-healthcheck-log\") on node \"crc\" DevicePath \"\"" Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.754197 4745 reconciler_common.go:293] "Volume detached for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-ceilometer-entrypoint-script\") on node \"crc\" DevicePath \"\"" Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.754217 4745 reconciler_common.go:293] "Volume detached for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-sensubility-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.754237 4745 reconciler_common.go:293] "Volume detached for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-collectd-config\") on node \"crc\" DevicePath \"\"" Dec 08 00:37:56 crc kubenswrapper[4745]: I1208 00:37:56.754254 4745 reconciler_common.go:293] "Volume detached for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/ee3c2950-dd8e-42fa-a22b-b03416435a8b-ceilometer-publisher\") on node \"crc\" DevicePath \"\"" Dec 08 00:37:57 crc kubenswrapper[4745]: I1208 00:37:57.209167 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-2fjqk" event={"ID":"ee3c2950-dd8e-42fa-a22b-b03416435a8b","Type":"ContainerDied","Data":"0ef8bd4e884814f639ab0419cbea6845cd4f07164949225fdbdb1288af6beb7c"} Dec 08 00:37:57 crc kubenswrapper[4745]: I1208 00:37:57.209206 4745 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ef8bd4e884814f639ab0419cbea6845cd4f07164949225fdbdb1288af6beb7c" Dec 08 00:37:57 crc kubenswrapper[4745]: I1208 00:37:57.209304 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-2fjqk" Dec 08 00:37:58 crc kubenswrapper[4745]: I1208 00:37:58.937715 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-2fjqk_ee3c2950-dd8e-42fa-a22b-b03416435a8b/smoketest-collectd/0.log" Dec 08 00:37:59 crc kubenswrapper[4745]: I1208 00:37:59.280970 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-2fjqk_ee3c2950-dd8e-42fa-a22b-b03416435a8b/smoketest-ceilometer/0.log" Dec 08 00:37:59 crc kubenswrapper[4745]: I1208 00:37:59.573860 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-interconnect-68864d46cb-n42pf_3b8650ca-762b-4a67-a77a-9de3c1fcde65/default-interconnect/0.log" Dec 08 00:37:59 crc kubenswrapper[4745]: I1208 00:37:59.888947 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4_c2126c0f-717c-4df9-9009-8248c9cd99c4/bridge/2.log" Dec 08 00:38:00 crc kubenswrapper[4745]: I1208 00:38:00.403096 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7cd87f9766-x7sv4_c2126c0f-717c-4df9-9009-8248c9cd99c4/sg-core/0.log" Dec 08 00:38:00 crc kubenswrapper[4745]: I1208 00:38:00.694037 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6_28006f01-5d82-4b3c-8a5c-f77fa8f24081/bridge/2.log" Dec 08 00:38:01 crc kubenswrapper[4745]: I1208 00:38:01.014824 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-8548d99b94-hhzc6_28006f01-5d82-4b3c-8a5c-f77fa8f24081/sg-core/0.log" Dec 08 00:38:01 crc kubenswrapper[4745]: I1208 00:38:01.398658 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl_a97f4588-f967-49e3-afcd-7c9a950b00ec/bridge/2.log" Dec 08 00:38:01 crc kubenswrapper[4745]: I1208 00:38:01.661794 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-57948895dc-ct8dl_a97f4588-f967-49e3-afcd-7c9a950b00ec/sg-core/0.log" Dec 08 00:38:02 crc kubenswrapper[4745]: I1208 00:38:02.023038 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f_ae8ae814-8c22-4d63-9907-a4eff4a5f600/bridge/2.log" Dec 08 00:38:02 crc kubenswrapper[4745]: I1208 00:38:02.352158 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-55f89fddc4-vdh4f_ae8ae814-8c22-4d63-9907-a4eff4a5f600/sg-core/0.log" Dec 08 00:38:02 crc kubenswrapper[4745]: I1208 00:38:02.710004 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v_f0030446-12f9-44fe-a42f-b8645bc0a9e2/bridge/2.log" Dec 08 00:38:03 crc kubenswrapper[4745]: I1208 00:38:03.050636 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-5759b4d97-4mm6v_f0030446-12f9-44fe-a42f-b8645bc0a9e2/sg-core/0.log" Dec 08 00:38:03 crc kubenswrapper[4745]: I1208 00:38:03.909365 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:38:03 crc kubenswrapper[4745]: E1208 
00:38:03.909647 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:38:05 crc kubenswrapper[4745]: I1208 00:38:05.243425 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-599b4778bd-f4452_16b0d4ba-0de0-4171-be83-e536a3199c16/operator/0.log" Dec 08 00:38:05 crc kubenswrapper[4745]: I1208 00:38:05.565555 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-default-0_16fb1bc8-8e50-4d0b-ae02-040249f1bf88/prometheus/0.log" Dec 08 00:38:05 crc kubenswrapper[4745]: I1208 00:38:05.900533 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elasticsearch-es-default-0_c18b8080-8e69-4234-9e9d-7ec8cacb62a3/elasticsearch/0.log" Dec 08 00:38:06 crc kubenswrapper[4745]: I1208 00:38:06.223300 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-6856cfb745-jwn74_9a588f99-61be-4bfc-b1b6-c444e06c2ada/prometheus-webhook-snmp/0.log" Dec 08 00:38:06 crc kubenswrapper[4745]: I1208 00:38:06.576633 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_58680707-df73-4d46-8148-5410ac829436/alertmanager/0.log" Dec 08 00:38:18 crc kubenswrapper[4745]: I1208 00:38:18.882822 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:38:18 crc kubenswrapper[4745]: E1208 00:38:18.884303 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:38:24 crc kubenswrapper[4745]: I1208 00:38:24.144531 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-8667fccdb7-8fsfw_5d411af2-29cc-431a-a376-9031915ed5c1/operator/0.log" Dec 08 00:38:26 crc kubenswrapper[4745]: I1208 00:38:26.223714 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-599b4778bd-f4452_16b0d4ba-0de0-4171-be83-e536a3199c16/operator/0.log" Dec 08 00:38:26 crc kubenswrapper[4745]: I1208 00:38:26.563618 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_qdr-test_5ea48e07-1f9d-4c42-b754-cdfbd8f1caa9/qdr/0.log" Dec 08 00:38:30 crc kubenswrapper[4745]: I1208 00:38:30.883532 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:38:30 crc kubenswrapper[4745]: E1208 00:38:30.884741 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:38:38 crc kubenswrapper[4745]: I1208 00:38:38.067905 4745 scope.go:117] "RemoveContainer" containerID="bded26f29f0617bd4f0a6d44f392e603869b2f8d7c45c93c1eeb08f6be0d8c99" Dec 08 00:38:38 crc kubenswrapper[4745]: I1208 00:38:38.103638 4745 scope.go:117] "RemoveContainer" containerID="17835e59611acc16836138393fbd1c8a2746ac280b493b7e4742d08a8611e9de" Dec 08 00:38:43 crc kubenswrapper[4745]: I1208 00:38:43.883359 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:38:43 crc kubenswrapper[4745]: E1208 00:38:43.884317 4745 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6czdv_openshift-machine-config-operator(44f083ce-ad64-45d5-971c-eca93c5bddd6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.244207 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-fcgwl/must-gather-2gpjs"] Dec 08 00:38:53 crc kubenswrapper[4745]: E1208 00:38:53.245171 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee3c2950-dd8e-42fa-a22b-b03416435a8b" containerName="smoketest-collectd" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.245188 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee3c2950-dd8e-42fa-a22b-b03416435a8b" containerName="smoketest-collectd" Dec 08 00:38:53 crc kubenswrapper[4745]: E1208 00:38:53.245206 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d" containerName="curl" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.245215 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d" containerName="curl" Dec 08 00:38:53 crc kubenswrapper[4745]: E1208 00:38:53.245228 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee3c2950-dd8e-42fa-a22b-b03416435a8b" containerName="smoketest-ceilometer" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.245236 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee3c2950-dd8e-42fa-a22b-b03416435a8b" containerName="smoketest-ceilometer" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.245382 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee3c2950-dd8e-42fa-a22b-b03416435a8b" containerName="smoketest-collectd" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.245400 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee3c2950-dd8e-42fa-a22b-b03416435a8b" containerName="smoketest-ceilometer" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.245415 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff47ea5f-33ed-42dc-9c79-e9bd29be0b2d" containerName="curl" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.246249 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-fcgwl/must-gather-2gpjs" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.254076 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-fcgwl/must-gather-2gpjs"] Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.259081 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-fcgwl"/"openshift-service-ca.crt" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.264366 4745 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-fcgwl"/"default-dockercfg-bcf6j" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.264620 4745 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-fcgwl"/"kube-root-ca.crt" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.342591 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/04ea437b-ff46-4fe6-9b1e-9b1f7980027a-must-gather-output\") pod \"must-gather-2gpjs\" (UID: \"04ea437b-ff46-4fe6-9b1e-9b1f7980027a\") " pod="openshift-must-gather-fcgwl/must-gather-2gpjs" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.342645 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqqf9\" (UniqueName: \"kubernetes.io/projected/04ea437b-ff46-4fe6-9b1e-9b1f7980027a-kube-api-access-wqqf9\") pod \"must-gather-2gpjs\" (UID: \"04ea437b-ff46-4fe6-9b1e-9b1f7980027a\") " pod="openshift-must-gather-fcgwl/must-gather-2gpjs" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.444279 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/04ea437b-ff46-4fe6-9b1e-9b1f7980027a-must-gather-output\") pod \"must-gather-2gpjs\" (UID: \"04ea437b-ff46-4fe6-9b1e-9b1f7980027a\") " pod="openshift-must-gather-fcgwl/must-gather-2gpjs" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.444333 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqqf9\" (UniqueName: \"kubernetes.io/projected/04ea437b-ff46-4fe6-9b1e-9b1f7980027a-kube-api-access-wqqf9\") pod \"must-gather-2gpjs\" (UID: \"04ea437b-ff46-4fe6-9b1e-9b1f7980027a\") " pod="openshift-must-gather-fcgwl/must-gather-2gpjs" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.445514 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/04ea437b-ff46-4fe6-9b1e-9b1f7980027a-must-gather-output\") pod \"must-gather-2gpjs\" (UID: \"04ea437b-ff46-4fe6-9b1e-9b1f7980027a\") " pod="openshift-must-gather-fcgwl/must-gather-2gpjs" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.464502 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqqf9\" (UniqueName: \"kubernetes.io/projected/04ea437b-ff46-4fe6-9b1e-9b1f7980027a-kube-api-access-wqqf9\") pod \"must-gather-2gpjs\" (UID: \"04ea437b-ff46-4fe6-9b1e-9b1f7980027a\") " pod="openshift-must-gather-fcgwl/must-gather-2gpjs" Dec 08 00:38:53 crc kubenswrapper[4745]: I1208 00:38:53.560430 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-fcgwl/must-gather-2gpjs" Dec 08 00:38:54 crc kubenswrapper[4745]: I1208 00:38:54.025518 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-fcgwl/must-gather-2gpjs"] Dec 08 00:38:54 crc kubenswrapper[4745]: I1208 00:38:54.967678 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fcgwl/must-gather-2gpjs" event={"ID":"04ea437b-ff46-4fe6-9b1e-9b1f7980027a","Type":"ContainerStarted","Data":"8ed2f1fa175a599807f79ea6f0d32f6c015e207919b2d117370f880d756dfd9e"} Dec 08 00:38:55 crc kubenswrapper[4745]: I1208 00:38:55.882500 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:39:04 crc kubenswrapper[4745]: I1208 00:39:04.043494 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerStarted","Data":"786043d809c1dfa004a98546906f84e535192b8c62f081b960cccf9d1884da81"} Dec 08 00:39:04 crc kubenswrapper[4745]: I1208 00:39:04.048803 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fcgwl/must-gather-2gpjs" event={"ID":"04ea437b-ff46-4fe6-9b1e-9b1f7980027a","Type":"ContainerStarted","Data":"01eaaa36621a638b31ab89e4d4cf34aaf2814fa3685e1f394dedbae22cb4aca4"} Dec 08 00:39:04 crc kubenswrapper[4745]: I1208 00:39:04.048865 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fcgwl/must-gather-2gpjs" event={"ID":"04ea437b-ff46-4fe6-9b1e-9b1f7980027a","Type":"ContainerStarted","Data":"de032e33a96b82e7473aa23ae3869da3f50c9984d6b1cee0b6c4f9bca8ae2223"} Dec 08 00:39:04 crc kubenswrapper[4745]: I1208 00:39:04.101568 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-fcgwl/must-gather-2gpjs" podStartSLOduration=2.109464638 podStartE2EDuration="11.101534686s" podCreationTimestamp="2025-12-08 00:38:53 +0000 UTC" firstStartedPulling="2025-12-08 00:38:54.030468689 +0000 UTC m=+1889.459675009" lastFinishedPulling="2025-12-08 00:39:03.022538757 +0000 UTC m=+1898.451745057" observedRunningTime="2025-12-08 00:39:04.099256825 +0000 UTC m=+1899.528463155" watchObservedRunningTime="2025-12-08 00:39:04.101534686 +0000 UTC m=+1899.530741026" Dec 08 00:39:48 crc kubenswrapper[4745]: I1208 00:39:48.611464 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-nsh9z_27dbf6ed-7b15-4ddd-84b0-83fa2f178c63/control-plane-machine-set-operator/0.log" Dec 08 00:39:48 crc kubenswrapper[4745]: I1208 00:39:48.729385 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dktn5_d4a755dd-bcbd-4cf0-a396-673809d92250/kube-rbac-proxy/0.log" Dec 08 00:39:48 crc kubenswrapper[4745]: I1208 00:39:48.816401 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dktn5_d4a755dd-bcbd-4cf0-a396-673809d92250/machine-api-operator/0.log" Dec 08 00:40:01 crc kubenswrapper[4745]: I1208 00:40:01.911278 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-qhxds_efcc61c0-a81a-405d-b902-029423222c50/cert-manager-controller/0.log" Dec 08 00:40:02 crc kubenswrapper[4745]: I1208 00:40:02.067439 4745 log.go:25] "Finished parsing log file" 
path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-zn2dd_589ab375-83a9-43be-a945-92245f00f756/cert-manager-cainjector/0.log" Dec 08 00:40:02 crc kubenswrapper[4745]: I1208 00:40:02.102072 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-p4kck_c493caf0-3e42-4c34-8574-a46e6fa2db3f/cert-manager-webhook/0.log" Dec 08 00:40:18 crc kubenswrapper[4745]: I1208 00:40:18.705873 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6_02dc9d5d-28eb-4c8e-af70-445fc2a8214e/util/0.log" Dec 08 00:40:18 crc kubenswrapper[4745]: I1208 00:40:18.919711 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6_02dc9d5d-28eb-4c8e-af70-445fc2a8214e/util/0.log" Dec 08 00:40:18 crc kubenswrapper[4745]: I1208 00:40:18.949498 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6_02dc9d5d-28eb-4c8e-af70-445fc2a8214e/pull/0.log" Dec 08 00:40:18 crc kubenswrapper[4745]: I1208 00:40:18.950321 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6_02dc9d5d-28eb-4c8e-af70-445fc2a8214e/pull/0.log" Dec 08 00:40:19 crc kubenswrapper[4745]: I1208 00:40:19.110134 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6_02dc9d5d-28eb-4c8e-af70-445fc2a8214e/util/0.log" Dec 08 00:40:19 crc kubenswrapper[4745]: I1208 00:40:19.149206 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6_02dc9d5d-28eb-4c8e-af70-445fc2a8214e/pull/0.log" Dec 08 00:40:19 crc kubenswrapper[4745]: I1208 00:40:19.193850 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amh6k6_02dc9d5d-28eb-4c8e-af70-445fc2a8214e/extract/0.log" Dec 08 00:40:19 crc kubenswrapper[4745]: I1208 00:40:19.317035 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s_f2525385-23f5-47c8-aee0-d56f22c34f7e/util/0.log" Dec 08 00:40:19 crc kubenswrapper[4745]: I1208 00:40:19.404108 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s_f2525385-23f5-47c8-aee0-d56f22c34f7e/util/0.log" Dec 08 00:40:19 crc kubenswrapper[4745]: I1208 00:40:19.538472 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s_f2525385-23f5-47c8-aee0-d56f22c34f7e/pull/0.log" Dec 08 00:40:19 crc kubenswrapper[4745]: I1208 00:40:19.542416 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s_f2525385-23f5-47c8-aee0-d56f22c34f7e/pull/0.log" Dec 08 00:40:19 crc kubenswrapper[4745]: I1208 00:40:19.761535 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s_f2525385-23f5-47c8-aee0-d56f22c34f7e/pull/0.log" Dec 08 00:40:19 crc 
kubenswrapper[4745]: I1208 00:40:19.768091 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s_f2525385-23f5-47c8-aee0-d56f22c34f7e/extract/0.log" Dec 08 00:40:19 crc kubenswrapper[4745]: I1208 00:40:19.786200 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92105mk4s_f2525385-23f5-47c8-aee0-d56f22c34f7e/util/0.log" Dec 08 00:40:19 crc kubenswrapper[4745]: I1208 00:40:19.961265 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46_eb550ae0-c84f-4ddc-a717-c54f7cbf63bd/util/0.log" Dec 08 00:40:20 crc kubenswrapper[4745]: I1208 00:40:20.109722 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46_eb550ae0-c84f-4ddc-a717-c54f7cbf63bd/pull/0.log" Dec 08 00:40:20 crc kubenswrapper[4745]: I1208 00:40:20.132483 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46_eb550ae0-c84f-4ddc-a717-c54f7cbf63bd/pull/0.log" Dec 08 00:40:20 crc kubenswrapper[4745]: I1208 00:40:20.137660 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46_eb550ae0-c84f-4ddc-a717-c54f7cbf63bd/util/0.log" Dec 08 00:40:20 crc kubenswrapper[4745]: I1208 00:40:20.300772 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46_eb550ae0-c84f-4ddc-a717-c54f7cbf63bd/pull/0.log" Dec 08 00:40:20 crc kubenswrapper[4745]: I1208 00:40:20.318483 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46_eb550ae0-c84f-4ddc-a717-c54f7cbf63bd/util/0.log" Dec 08 00:40:20 crc kubenswrapper[4745]: I1208 00:40:20.331047 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fftl46_eb550ae0-c84f-4ddc-a717-c54f7cbf63bd/extract/0.log" Dec 08 00:40:20 crc kubenswrapper[4745]: I1208 00:40:20.499386 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl_1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a/util/0.log" Dec 08 00:40:20 crc kubenswrapper[4745]: I1208 00:40:20.648758 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl_1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a/pull/0.log" Dec 08 00:40:20 crc kubenswrapper[4745]: I1208 00:40:20.658502 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl_1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a/pull/0.log" Dec 08 00:40:20 crc kubenswrapper[4745]: I1208 00:40:20.701785 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl_1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a/util/0.log" Dec 08 00:40:20 crc kubenswrapper[4745]: I1208 00:40:20.850252 4745 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl_1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a/pull/0.log" Dec 08 00:40:20 crc kubenswrapper[4745]: I1208 00:40:20.861460 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl_1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a/util/0.log" Dec 08 00:40:20 crc kubenswrapper[4745]: I1208 00:40:20.909355 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5efpngl_1fd8e5bb-a3a3-4a3f-a26b-6768856dc30a/extract/0.log" Dec 08 00:40:21 crc kubenswrapper[4745]: I1208 00:40:21.009005 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z697b_448a7524-1168-4f41-a50c-48e06c2440d5/extract-utilities/0.log" Dec 08 00:40:21 crc kubenswrapper[4745]: I1208 00:40:21.174813 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z697b_448a7524-1168-4f41-a50c-48e06c2440d5/extract-content/0.log" Dec 08 00:40:21 crc kubenswrapper[4745]: I1208 00:40:21.189465 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z697b_448a7524-1168-4f41-a50c-48e06c2440d5/extract-content/0.log" Dec 08 00:40:21 crc kubenswrapper[4745]: I1208 00:40:21.200332 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z697b_448a7524-1168-4f41-a50c-48e06c2440d5/extract-utilities/0.log" Dec 08 00:40:21 crc kubenswrapper[4745]: I1208 00:40:21.366209 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z697b_448a7524-1168-4f41-a50c-48e06c2440d5/extract-content/0.log" Dec 08 00:40:21 crc kubenswrapper[4745]: I1208 00:40:21.373782 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z697b_448a7524-1168-4f41-a50c-48e06c2440d5/extract-utilities/0.log" Dec 08 00:40:21 crc kubenswrapper[4745]: I1208 00:40:21.530639 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z697b_448a7524-1168-4f41-a50c-48e06c2440d5/registry-server/0.log" Dec 08 00:40:21 crc kubenswrapper[4745]: I1208 00:40:21.566006 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlt2t_e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6/extract-utilities/0.log" Dec 08 00:40:21 crc kubenswrapper[4745]: I1208 00:40:21.706641 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlt2t_e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6/extract-content/0.log" Dec 08 00:40:21 crc kubenswrapper[4745]: I1208 00:40:21.743755 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlt2t_e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6/extract-utilities/0.log" Dec 08 00:40:21 crc kubenswrapper[4745]: I1208 00:40:21.747194 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlt2t_e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6/extract-content/0.log" Dec 08 00:40:21 crc kubenswrapper[4745]: I1208 00:40:21.862231 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlt2t_e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6/extract-content/0.log" Dec 08 00:40:21 crc kubenswrapper[4745]: 
I1208 00:40:21.870827 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlt2t_e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6/extract-utilities/0.log" Dec 08 00:40:22 crc kubenswrapper[4745]: I1208 00:40:22.008827 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-sz9zf_c5de8b9e-5510-4473-bd7b-e105e794b3be/marketplace-operator/0.log" Dec 08 00:40:22 crc kubenswrapper[4745]: I1208 00:40:22.127591 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4x96l_0d07f882-dd50-42e8-8619-1acf9a483dc2/extract-utilities/0.log" Dec 08 00:40:22 crc kubenswrapper[4745]: I1208 00:40:22.158120 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlt2t_e08bdc54-d5cc-4fa7-8d12-9a005c8d3ce6/registry-server/0.log" Dec 08 00:40:22 crc kubenswrapper[4745]: I1208 00:40:22.259443 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4x96l_0d07f882-dd50-42e8-8619-1acf9a483dc2/extract-utilities/0.log" Dec 08 00:40:22 crc kubenswrapper[4745]: I1208 00:40:22.297719 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4x96l_0d07f882-dd50-42e8-8619-1acf9a483dc2/extract-content/0.log" Dec 08 00:40:22 crc kubenswrapper[4745]: I1208 00:40:22.320135 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4x96l_0d07f882-dd50-42e8-8619-1acf9a483dc2/extract-content/0.log" Dec 08 00:40:22 crc kubenswrapper[4745]: I1208 00:40:22.463643 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4x96l_0d07f882-dd50-42e8-8619-1acf9a483dc2/extract-utilities/0.log" Dec 08 00:40:22 crc kubenswrapper[4745]: I1208 00:40:22.523387 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4x96l_0d07f882-dd50-42e8-8619-1acf9a483dc2/extract-content/0.log" Dec 08 00:40:22 crc kubenswrapper[4745]: I1208 00:40:22.644355 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4x96l_0d07f882-dd50-42e8-8619-1acf9a483dc2/registry-server/0.log" Dec 08 00:40:35 crc kubenswrapper[4745]: I1208 00:40:35.777395 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-lrvkr_a2fa94d7-daa5-4465-a657-7d48ec101d98/prometheus-operator/0.log" Dec 08 00:40:35 crc kubenswrapper[4745]: I1208 00:40:35.944093 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5d96574fff-h4ft2_66dacf9f-e094-4d4a-ab15-c8d2da21d334/prometheus-operator-admission-webhook/0.log" Dec 08 00:40:35 crc kubenswrapper[4745]: I1208 00:40:35.988731 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-5d96574fff-hk8d6_dc180ed4-bc7e-4a33-ba55-def51f0edd4e/prometheus-operator-admission-webhook/0.log" Dec 08 00:40:36 crc kubenswrapper[4745]: I1208 00:40:36.140118 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-bl7n7_ee524a4b-6873-4801-af1a-955b4c28dd27/operator/0.log" Dec 08 00:40:36 crc kubenswrapper[4745]: I1208 00:40:36.140524 4745 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-drjd7_367235e5-1c6a-42e1-b7fa-39dd81931cb9/perses-operator/0.log" Dec 08 00:40:46 crc kubenswrapper[4745]: I1208 00:40:46.081690 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2vmlf"] Dec 08 00:40:46 crc kubenswrapper[4745]: I1208 00:40:46.083838 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:40:46 crc kubenswrapper[4745]: I1208 00:40:46.095760 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2vmlf"] Dec 08 00:40:46 crc kubenswrapper[4745]: I1208 00:40:46.139065 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vczs\" (UniqueName: \"kubernetes.io/projected/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-kube-api-access-4vczs\") pod \"redhat-operators-2vmlf\" (UID: \"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad\") " pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:40:46 crc kubenswrapper[4745]: I1208 00:40:46.139132 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-utilities\") pod \"redhat-operators-2vmlf\" (UID: \"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad\") " pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:40:46 crc kubenswrapper[4745]: I1208 00:40:46.139189 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-catalog-content\") pod \"redhat-operators-2vmlf\" (UID: \"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad\") " pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:40:46 crc kubenswrapper[4745]: I1208 00:40:46.240364 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-catalog-content\") pod \"redhat-operators-2vmlf\" (UID: \"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad\") " pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:40:46 crc kubenswrapper[4745]: I1208 00:40:46.240526 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vczs\" (UniqueName: \"kubernetes.io/projected/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-kube-api-access-4vczs\") pod \"redhat-operators-2vmlf\" (UID: \"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad\") " pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:40:46 crc kubenswrapper[4745]: I1208 00:40:46.240600 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-utilities\") pod \"redhat-operators-2vmlf\" (UID: \"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad\") " pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:40:46 crc kubenswrapper[4745]: I1208 00:40:46.241232 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-catalog-content\") pod \"redhat-operators-2vmlf\" (UID: \"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad\") " pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:40:46 crc kubenswrapper[4745]: I1208 00:40:46.241243 4745 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-utilities\") pod \"redhat-operators-2vmlf\" (UID: \"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad\") " pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:40:46 crc kubenswrapper[4745]: I1208 00:40:46.260879 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vczs\" (UniqueName: \"kubernetes.io/projected/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-kube-api-access-4vczs\") pod \"redhat-operators-2vmlf\" (UID: \"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad\") " pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:40:46 crc kubenswrapper[4745]: I1208 00:40:46.426342 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:40:46 crc kubenswrapper[4745]: I1208 00:40:46.895283 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2vmlf"] Dec 08 00:40:46 crc kubenswrapper[4745]: I1208 00:40:46.938057 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vmlf" event={"ID":"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad","Type":"ContainerStarted","Data":"77f9f0011d5d30361e8ff5be96089d179416c7f14117b9afffe40c2b08a17371"} Dec 08 00:40:47 crc kubenswrapper[4745]: I1208 00:40:47.951921 4745 generic.go:334] "Generic (PLEG): container finished" podID="f850cb8a-64ed-4ba4-83ec-d1b86125b1ad" containerID="fa146f244fb27c5236fa58f26b87e980ced4f184c45d80aba99f168bf8f9eaf2" exitCode=0 Dec 08 00:40:47 crc kubenswrapper[4745]: I1208 00:40:47.952010 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vmlf" event={"ID":"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad","Type":"ContainerDied","Data":"fa146f244fb27c5236fa58f26b87e980ced4f184c45d80aba99f168bf8f9eaf2"} Dec 08 00:40:47 crc kubenswrapper[4745]: I1208 00:40:47.954342 4745 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 08 00:40:48 crc kubenswrapper[4745]: I1208 00:40:48.982494 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vmlf" event={"ID":"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad","Type":"ContainerStarted","Data":"72809fa32c441a0038a3363dbf428c522039dc04e87734d8864ad45b018f9dbf"} Dec 08 00:40:49 crc kubenswrapper[4745]: I1208 00:40:49.998259 4745 generic.go:334] "Generic (PLEG): container finished" podID="f850cb8a-64ed-4ba4-83ec-d1b86125b1ad" containerID="72809fa32c441a0038a3363dbf428c522039dc04e87734d8864ad45b018f9dbf" exitCode=0 Dec 08 00:40:49 crc kubenswrapper[4745]: I1208 00:40:49.998329 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vmlf" event={"ID":"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad","Type":"ContainerDied","Data":"72809fa32c441a0038a3363dbf428c522039dc04e87734d8864ad45b018f9dbf"} Dec 08 00:40:51 crc kubenswrapper[4745]: I1208 00:40:51.008775 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vmlf" event={"ID":"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad","Type":"ContainerStarted","Data":"c9c58889d666e7e661874c34ba55327a374f9dbfe7d442e234ac3bdaf7f55e8f"} Dec 08 00:40:51 crc kubenswrapper[4745]: I1208 00:40:51.041311 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2vmlf" podStartSLOduration=2.5506264 
podStartE2EDuration="5.041292502s" podCreationTimestamp="2025-12-08 00:40:46 +0000 UTC" firstStartedPulling="2025-12-08 00:40:47.953984323 +0000 UTC m=+2003.383190653" lastFinishedPulling="2025-12-08 00:40:50.444650425 +0000 UTC m=+2005.873856755" observedRunningTime="2025-12-08 00:40:51.036809613 +0000 UTC m=+2006.466015923" watchObservedRunningTime="2025-12-08 00:40:51.041292502 +0000 UTC m=+2006.470498802" Dec 08 00:40:56 crc kubenswrapper[4745]: I1208 00:40:56.427445 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:40:56 crc kubenswrapper[4745]: I1208 00:40:56.428152 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:40:57 crc kubenswrapper[4745]: I1208 00:40:57.480705 4745 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2vmlf" podUID="f850cb8a-64ed-4ba4-83ec-d1b86125b1ad" containerName="registry-server" probeResult="failure" output=< Dec 08 00:40:57 crc kubenswrapper[4745]: timeout: failed to connect service ":50051" within 1s Dec 08 00:40:57 crc kubenswrapper[4745]: > Dec 08 00:41:05 crc kubenswrapper[4745]: I1208 00:41:05.297369 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hcbvj"] Dec 08 00:41:05 crc kubenswrapper[4745]: I1208 00:41:05.299142 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:05 crc kubenswrapper[4745]: I1208 00:41:05.314441 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hcbvj"] Dec 08 00:41:05 crc kubenswrapper[4745]: I1208 00:41:05.353314 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd9d0126-232f-449b-8ce5-375568eb73ec-utilities\") pod \"certified-operators-hcbvj\" (UID: \"dd9d0126-232f-449b-8ce5-375568eb73ec\") " pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:05 crc kubenswrapper[4745]: I1208 00:41:05.353374 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7x4z\" (UniqueName: \"kubernetes.io/projected/dd9d0126-232f-449b-8ce5-375568eb73ec-kube-api-access-q7x4z\") pod \"certified-operators-hcbvj\" (UID: \"dd9d0126-232f-449b-8ce5-375568eb73ec\") " pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:05 crc kubenswrapper[4745]: I1208 00:41:05.353597 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd9d0126-232f-449b-8ce5-375568eb73ec-catalog-content\") pod \"certified-operators-hcbvj\" (UID: \"dd9d0126-232f-449b-8ce5-375568eb73ec\") " pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:05 crc kubenswrapper[4745]: I1208 00:41:05.455420 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd9d0126-232f-449b-8ce5-375568eb73ec-utilities\") pod \"certified-operators-hcbvj\" (UID: \"dd9d0126-232f-449b-8ce5-375568eb73ec\") " pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:05 crc kubenswrapper[4745]: I1208 00:41:05.455471 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7x4z\" (UniqueName: 
\"kubernetes.io/projected/dd9d0126-232f-449b-8ce5-375568eb73ec-kube-api-access-q7x4z\") pod \"certified-operators-hcbvj\" (UID: \"dd9d0126-232f-449b-8ce5-375568eb73ec\") " pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:05 crc kubenswrapper[4745]: I1208 00:41:05.455536 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd9d0126-232f-449b-8ce5-375568eb73ec-catalog-content\") pod \"certified-operators-hcbvj\" (UID: \"dd9d0126-232f-449b-8ce5-375568eb73ec\") " pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:05 crc kubenswrapper[4745]: I1208 00:41:05.456075 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd9d0126-232f-449b-8ce5-375568eb73ec-utilities\") pod \"certified-operators-hcbvj\" (UID: \"dd9d0126-232f-449b-8ce5-375568eb73ec\") " pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:05 crc kubenswrapper[4745]: I1208 00:41:05.456120 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd9d0126-232f-449b-8ce5-375568eb73ec-catalog-content\") pod \"certified-operators-hcbvj\" (UID: \"dd9d0126-232f-449b-8ce5-375568eb73ec\") " pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:05 crc kubenswrapper[4745]: I1208 00:41:05.486113 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7x4z\" (UniqueName: \"kubernetes.io/projected/dd9d0126-232f-449b-8ce5-375568eb73ec-kube-api-access-q7x4z\") pod \"certified-operators-hcbvj\" (UID: \"dd9d0126-232f-449b-8ce5-375568eb73ec\") " pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:05 crc kubenswrapper[4745]: I1208 00:41:05.618646 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:06 crc kubenswrapper[4745]: I1208 00:41:06.172959 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hcbvj"] Dec 08 00:41:06 crc kubenswrapper[4745]: W1208 00:41:06.201105 4745 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd9d0126_232f_449b_8ce5_375568eb73ec.slice/crio-232dbd8a9610651f1c18295d4939caa213c78a86252aabb2ee37f4d65cbafebf WatchSource:0}: Error finding container 232dbd8a9610651f1c18295d4939caa213c78a86252aabb2ee37f4d65cbafebf: Status 404 returned error can't find the container with id 232dbd8a9610651f1c18295d4939caa213c78a86252aabb2ee37f4d65cbafebf Dec 08 00:41:06 crc kubenswrapper[4745]: I1208 00:41:06.499396 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:41:06 crc kubenswrapper[4745]: I1208 00:41:06.559580 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:41:07 crc kubenswrapper[4745]: I1208 00:41:07.144613 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcbvj" event={"ID":"dd9d0126-232f-449b-8ce5-375568eb73ec","Type":"ContainerStarted","Data":"232dbd8a9610651f1c18295d4939caa213c78a86252aabb2ee37f4d65cbafebf"} Dec 08 00:41:08 crc kubenswrapper[4745]: I1208 00:41:08.178025 4745 generic.go:334] "Generic (PLEG): container finished" podID="dd9d0126-232f-449b-8ce5-375568eb73ec" containerID="80813ef6234ba5927461bac76c4f4e08a013f3a8837209e7ef1c6018ec6d81b2" exitCode=0 Dec 08 00:41:08 crc kubenswrapper[4745]: I1208 00:41:08.178842 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcbvj" event={"ID":"dd9d0126-232f-449b-8ce5-375568eb73ec","Type":"ContainerDied","Data":"80813ef6234ba5927461bac76c4f4e08a013f3a8837209e7ef1c6018ec6d81b2"} Dec 08 00:41:08 crc kubenswrapper[4745]: I1208 00:41:08.872737 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2vmlf"] Dec 08 00:41:08 crc kubenswrapper[4745]: I1208 00:41:08.873806 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2vmlf" podUID="f850cb8a-64ed-4ba4-83ec-d1b86125b1ad" containerName="registry-server" containerID="cri-o://c9c58889d666e7e661874c34ba55327a374f9dbfe7d442e234ac3bdaf7f55e8f" gracePeriod=2 Dec 08 00:41:09 crc kubenswrapper[4745]: I1208 00:41:09.192037 4745 generic.go:334] "Generic (PLEG): container finished" podID="f850cb8a-64ed-4ba4-83ec-d1b86125b1ad" containerID="c9c58889d666e7e661874c34ba55327a374f9dbfe7d442e234ac3bdaf7f55e8f" exitCode=0 Dec 08 00:41:09 crc kubenswrapper[4745]: I1208 00:41:09.192103 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vmlf" event={"ID":"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad","Type":"ContainerDied","Data":"c9c58889d666e7e661874c34ba55327a374f9dbfe7d442e234ac3bdaf7f55e8f"} Dec 08 00:41:09 crc kubenswrapper[4745]: I1208 00:41:09.194681 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcbvj" event={"ID":"dd9d0126-232f-449b-8ce5-375568eb73ec","Type":"ContainerStarted","Data":"704ecc0cb487e90c7891602a964209900dee55602f3324b17684841a6e92b52b"} Dec 08 00:41:09 crc kubenswrapper[4745]: I1208 
00:41:09.262382 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:41:09 crc kubenswrapper[4745]: I1208 00:41:09.337122 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-utilities\") pod \"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad\" (UID: \"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad\") " Dec 08 00:41:09 crc kubenswrapper[4745]: I1208 00:41:09.337205 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vczs\" (UniqueName: \"kubernetes.io/projected/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-kube-api-access-4vczs\") pod \"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad\" (UID: \"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad\") " Dec 08 00:41:09 crc kubenswrapper[4745]: I1208 00:41:09.337376 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-catalog-content\") pod \"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad\" (UID: \"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad\") " Dec 08 00:41:09 crc kubenswrapper[4745]: I1208 00:41:09.338535 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-utilities" (OuterVolumeSpecName: "utilities") pod "f850cb8a-64ed-4ba4-83ec-d1b86125b1ad" (UID: "f850cb8a-64ed-4ba4-83ec-d1b86125b1ad"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:41:09 crc kubenswrapper[4745]: I1208 00:41:09.343214 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-kube-api-access-4vczs" (OuterVolumeSpecName: "kube-api-access-4vczs") pod "f850cb8a-64ed-4ba4-83ec-d1b86125b1ad" (UID: "f850cb8a-64ed-4ba4-83ec-d1b86125b1ad"). InnerVolumeSpecName "kube-api-access-4vczs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:41:09 crc kubenswrapper[4745]: I1208 00:41:09.441710 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:41:09 crc kubenswrapper[4745]: I1208 00:41:09.441752 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vczs\" (UniqueName: \"kubernetes.io/projected/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-kube-api-access-4vczs\") on node \"crc\" DevicePath \"\"" Dec 08 00:41:09 crc kubenswrapper[4745]: I1208 00:41:09.449615 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f850cb8a-64ed-4ba4-83ec-d1b86125b1ad" (UID: "f850cb8a-64ed-4ba4-83ec-d1b86125b1ad"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:41:09 crc kubenswrapper[4745]: I1208 00:41:09.551970 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:41:10 crc kubenswrapper[4745]: I1208 00:41:10.205022 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vmlf" event={"ID":"f850cb8a-64ed-4ba4-83ec-d1b86125b1ad","Type":"ContainerDied","Data":"77f9f0011d5d30361e8ff5be96089d179416c7f14117b9afffe40c2b08a17371"} Dec 08 00:41:10 crc kubenswrapper[4745]: I1208 00:41:10.205079 4745 scope.go:117] "RemoveContainer" containerID="c9c58889d666e7e661874c34ba55327a374f9dbfe7d442e234ac3bdaf7f55e8f" Dec 08 00:41:10 crc kubenswrapper[4745]: I1208 00:41:10.205048 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2vmlf" Dec 08 00:41:10 crc kubenswrapper[4745]: I1208 00:41:10.209118 4745 generic.go:334] "Generic (PLEG): container finished" podID="dd9d0126-232f-449b-8ce5-375568eb73ec" containerID="704ecc0cb487e90c7891602a964209900dee55602f3324b17684841a6e92b52b" exitCode=0 Dec 08 00:41:10 crc kubenswrapper[4745]: I1208 00:41:10.209163 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcbvj" event={"ID":"dd9d0126-232f-449b-8ce5-375568eb73ec","Type":"ContainerDied","Data":"704ecc0cb487e90c7891602a964209900dee55602f3324b17684841a6e92b52b"} Dec 08 00:41:10 crc kubenswrapper[4745]: I1208 00:41:10.237194 4745 scope.go:117] "RemoveContainer" containerID="72809fa32c441a0038a3363dbf428c522039dc04e87734d8864ad45b018f9dbf" Dec 08 00:41:10 crc kubenswrapper[4745]: I1208 00:41:10.247199 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2vmlf"] Dec 08 00:41:10 crc kubenswrapper[4745]: I1208 00:41:10.253227 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2vmlf"] Dec 08 00:41:10 crc kubenswrapper[4745]: I1208 00:41:10.288166 4745 scope.go:117] "RemoveContainer" containerID="fa146f244fb27c5236fa58f26b87e980ced4f184c45d80aba99f168bf8f9eaf2" Dec 08 00:41:10 crc kubenswrapper[4745]: I1208 00:41:10.898878 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f850cb8a-64ed-4ba4-83ec-d1b86125b1ad" path="/var/lib/kubelet/pods/f850cb8a-64ed-4ba4-83ec-d1b86125b1ad/volumes" Dec 08 00:41:11 crc kubenswrapper[4745]: I1208 00:41:11.226048 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcbvj" event={"ID":"dd9d0126-232f-449b-8ce5-375568eb73ec","Type":"ContainerStarted","Data":"31375deb800fdb20e2b24f23bbb811faf75d758858e3df550d441290d355c740"} Dec 08 00:41:11 crc kubenswrapper[4745]: I1208 00:41:11.260108 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hcbvj" podStartSLOduration=3.823679138 podStartE2EDuration="6.260078031s" podCreationTimestamp="2025-12-08 00:41:05 +0000 UTC" firstStartedPulling="2025-12-08 00:41:08.18119774 +0000 UTC m=+2023.610404080" lastFinishedPulling="2025-12-08 00:41:10.617596673 +0000 UTC m=+2026.046802973" observedRunningTime="2025-12-08 00:41:11.256280863 +0000 UTC m=+2026.685487213" watchObservedRunningTime="2025-12-08 00:41:11.260078031 +0000 UTC m=+2026.689284391" Dec 08 00:41:15 crc kubenswrapper[4745]: I1208 
00:41:15.618794 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:15 crc kubenswrapper[4745]: I1208 00:41:15.621447 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:15 crc kubenswrapper[4745]: I1208 00:41:15.695638 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:16 crc kubenswrapper[4745]: I1208 00:41:16.332997 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:16 crc kubenswrapper[4745]: I1208 00:41:16.936799 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hcbvj"] Dec 08 00:41:18 crc kubenswrapper[4745]: I1208 00:41:18.306034 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hcbvj" podUID="dd9d0126-232f-449b-8ce5-375568eb73ec" containerName="registry-server" containerID="cri-o://31375deb800fdb20e2b24f23bbb811faf75d758858e3df550d441290d355c740" gracePeriod=2 Dec 08 00:41:18 crc kubenswrapper[4745]: I1208 00:41:18.741465 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:18 crc kubenswrapper[4745]: I1208 00:41:18.802327 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7x4z\" (UniqueName: \"kubernetes.io/projected/dd9d0126-232f-449b-8ce5-375568eb73ec-kube-api-access-q7x4z\") pod \"dd9d0126-232f-449b-8ce5-375568eb73ec\" (UID: \"dd9d0126-232f-449b-8ce5-375568eb73ec\") " Dec 08 00:41:18 crc kubenswrapper[4745]: I1208 00:41:18.808083 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd9d0126-232f-449b-8ce5-375568eb73ec-kube-api-access-q7x4z" (OuterVolumeSpecName: "kube-api-access-q7x4z") pod "dd9d0126-232f-449b-8ce5-375568eb73ec" (UID: "dd9d0126-232f-449b-8ce5-375568eb73ec"). InnerVolumeSpecName "kube-api-access-q7x4z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:41:18 crc kubenswrapper[4745]: I1208 00:41:18.802466 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd9d0126-232f-449b-8ce5-375568eb73ec-catalog-content\") pod \"dd9d0126-232f-449b-8ce5-375568eb73ec\" (UID: \"dd9d0126-232f-449b-8ce5-375568eb73ec\") " Dec 08 00:41:18 crc kubenswrapper[4745]: I1208 00:41:18.813235 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd9d0126-232f-449b-8ce5-375568eb73ec-utilities\") pod \"dd9d0126-232f-449b-8ce5-375568eb73ec\" (UID: \"dd9d0126-232f-449b-8ce5-375568eb73ec\") " Dec 08 00:41:18 crc kubenswrapper[4745]: I1208 00:41:18.813719 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7x4z\" (UniqueName: \"kubernetes.io/projected/dd9d0126-232f-449b-8ce5-375568eb73ec-kube-api-access-q7x4z\") on node \"crc\" DevicePath \"\"" Dec 08 00:41:18 crc kubenswrapper[4745]: I1208 00:41:18.814442 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd9d0126-232f-449b-8ce5-375568eb73ec-utilities" (OuterVolumeSpecName: "utilities") pod "dd9d0126-232f-449b-8ce5-375568eb73ec" (UID: "dd9d0126-232f-449b-8ce5-375568eb73ec"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:41:18 crc kubenswrapper[4745]: I1208 00:41:18.872470 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd9d0126-232f-449b-8ce5-375568eb73ec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dd9d0126-232f-449b-8ce5-375568eb73ec" (UID: "dd9d0126-232f-449b-8ce5-375568eb73ec"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:41:18 crc kubenswrapper[4745]: I1208 00:41:18.914823 4745 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd9d0126-232f-449b-8ce5-375568eb73ec-utilities\") on node \"crc\" DevicePath \"\"" Dec 08 00:41:18 crc kubenswrapper[4745]: I1208 00:41:18.914979 4745 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd9d0126-232f-449b-8ce5-375568eb73ec-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 08 00:41:19 crc kubenswrapper[4745]: I1208 00:41:19.322738 4745 generic.go:334] "Generic (PLEG): container finished" podID="dd9d0126-232f-449b-8ce5-375568eb73ec" containerID="31375deb800fdb20e2b24f23bbb811faf75d758858e3df550d441290d355c740" exitCode=0 Dec 08 00:41:19 crc kubenswrapper[4745]: I1208 00:41:19.322794 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcbvj" event={"ID":"dd9d0126-232f-449b-8ce5-375568eb73ec","Type":"ContainerDied","Data":"31375deb800fdb20e2b24f23bbb811faf75d758858e3df550d441290d355c740"} Dec 08 00:41:19 crc kubenswrapper[4745]: I1208 00:41:19.322825 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hcbvj" event={"ID":"dd9d0126-232f-449b-8ce5-375568eb73ec","Type":"ContainerDied","Data":"232dbd8a9610651f1c18295d4939caa213c78a86252aabb2ee37f4d65cbafebf"} Dec 08 00:41:19 crc kubenswrapper[4745]: I1208 00:41:19.322851 4745 scope.go:117] "RemoveContainer" containerID="31375deb800fdb20e2b24f23bbb811faf75d758858e3df550d441290d355c740" Dec 08 00:41:19 crc kubenswrapper[4745]: I1208 00:41:19.323012 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hcbvj" Dec 08 00:41:19 crc kubenswrapper[4745]: I1208 00:41:19.358615 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hcbvj"] Dec 08 00:41:19 crc kubenswrapper[4745]: I1208 00:41:19.361577 4745 scope.go:117] "RemoveContainer" containerID="704ecc0cb487e90c7891602a964209900dee55602f3324b17684841a6e92b52b" Dec 08 00:41:19 crc kubenswrapper[4745]: I1208 00:41:19.371230 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hcbvj"] Dec 08 00:41:19 crc kubenswrapper[4745]: I1208 00:41:19.384721 4745 scope.go:117] "RemoveContainer" containerID="80813ef6234ba5927461bac76c4f4e08a013f3a8837209e7ef1c6018ec6d81b2" Dec 08 00:41:19 crc kubenswrapper[4745]: I1208 00:41:19.431122 4745 scope.go:117] "RemoveContainer" containerID="31375deb800fdb20e2b24f23bbb811faf75d758858e3df550d441290d355c740" Dec 08 00:41:19 crc kubenswrapper[4745]: E1208 00:41:19.431553 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31375deb800fdb20e2b24f23bbb811faf75d758858e3df550d441290d355c740\": container with ID starting with 31375deb800fdb20e2b24f23bbb811faf75d758858e3df550d441290d355c740 not found: ID does not exist" containerID="31375deb800fdb20e2b24f23bbb811faf75d758858e3df550d441290d355c740" Dec 08 00:41:19 crc kubenswrapper[4745]: I1208 00:41:19.431599 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31375deb800fdb20e2b24f23bbb811faf75d758858e3df550d441290d355c740"} err="failed to get container status \"31375deb800fdb20e2b24f23bbb811faf75d758858e3df550d441290d355c740\": rpc error: code = NotFound desc = could not find container \"31375deb800fdb20e2b24f23bbb811faf75d758858e3df550d441290d355c740\": container with ID starting with 31375deb800fdb20e2b24f23bbb811faf75d758858e3df550d441290d355c740 not found: ID does not exist" Dec 08 00:41:19 crc kubenswrapper[4745]: I1208 00:41:19.431625 4745 scope.go:117] "RemoveContainer" containerID="704ecc0cb487e90c7891602a964209900dee55602f3324b17684841a6e92b52b" Dec 08 00:41:19 crc kubenswrapper[4745]: E1208 00:41:19.432032 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"704ecc0cb487e90c7891602a964209900dee55602f3324b17684841a6e92b52b\": container with ID starting with 704ecc0cb487e90c7891602a964209900dee55602f3324b17684841a6e92b52b not found: ID does not exist" containerID="704ecc0cb487e90c7891602a964209900dee55602f3324b17684841a6e92b52b" Dec 08 00:41:19 crc kubenswrapper[4745]: I1208 00:41:19.432112 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"704ecc0cb487e90c7891602a964209900dee55602f3324b17684841a6e92b52b"} err="failed to get container status \"704ecc0cb487e90c7891602a964209900dee55602f3324b17684841a6e92b52b\": rpc error: code = NotFound desc = could not find container \"704ecc0cb487e90c7891602a964209900dee55602f3324b17684841a6e92b52b\": container with ID starting with 704ecc0cb487e90c7891602a964209900dee55602f3324b17684841a6e92b52b not found: ID does not exist" Dec 08 00:41:19 crc kubenswrapper[4745]: I1208 00:41:19.432151 4745 scope.go:117] "RemoveContainer" containerID="80813ef6234ba5927461bac76c4f4e08a013f3a8837209e7ef1c6018ec6d81b2" Dec 08 00:41:19 crc kubenswrapper[4745]: E1208 00:41:19.432544 4745 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"80813ef6234ba5927461bac76c4f4e08a013f3a8837209e7ef1c6018ec6d81b2\": container with ID starting with 80813ef6234ba5927461bac76c4f4e08a013f3a8837209e7ef1c6018ec6d81b2 not found: ID does not exist" containerID="80813ef6234ba5927461bac76c4f4e08a013f3a8837209e7ef1c6018ec6d81b2" Dec 08 00:41:19 crc kubenswrapper[4745]: I1208 00:41:19.432573 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80813ef6234ba5927461bac76c4f4e08a013f3a8837209e7ef1c6018ec6d81b2"} err="failed to get container status \"80813ef6234ba5927461bac76c4f4e08a013f3a8837209e7ef1c6018ec6d81b2\": rpc error: code = NotFound desc = could not find container \"80813ef6234ba5927461bac76c4f4e08a013f3a8837209e7ef1c6018ec6d81b2\": container with ID starting with 80813ef6234ba5927461bac76c4f4e08a013f3a8837209e7ef1c6018ec6d81b2 not found: ID does not exist" Dec 08 00:41:20 crc kubenswrapper[4745]: I1208 00:41:20.893162 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd9d0126-232f-449b-8ce5-375568eb73ec" path="/var/lib/kubelet/pods/dd9d0126-232f-449b-8ce5-375568eb73ec/volumes" Dec 08 00:41:21 crc kubenswrapper[4745]: I1208 00:41:21.363771 4745 generic.go:334] "Generic (PLEG): container finished" podID="04ea437b-ff46-4fe6-9b1e-9b1f7980027a" containerID="de032e33a96b82e7473aa23ae3869da3f50c9984d6b1cee0b6c4f9bca8ae2223" exitCode=0 Dec 08 00:41:21 crc kubenswrapper[4745]: I1208 00:41:21.363873 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-fcgwl/must-gather-2gpjs" event={"ID":"04ea437b-ff46-4fe6-9b1e-9b1f7980027a","Type":"ContainerDied","Data":"de032e33a96b82e7473aa23ae3869da3f50c9984d6b1cee0b6c4f9bca8ae2223"} Dec 08 00:41:21 crc kubenswrapper[4745]: I1208 00:41:21.365786 4745 scope.go:117] "RemoveContainer" containerID="de032e33a96b82e7473aa23ae3869da3f50c9984d6b1cee0b6c4f9bca8ae2223" Dec 08 00:41:21 crc kubenswrapper[4745]: I1208 00:41:21.464534 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-fcgwl_must-gather-2gpjs_04ea437b-ff46-4fe6-9b1e-9b1f7980027a/gather/0.log" Dec 08 00:41:22 crc kubenswrapper[4745]: I1208 00:41:22.460163 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:41:22 crc kubenswrapper[4745]: I1208 00:41:22.460232 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:41:28 crc kubenswrapper[4745]: I1208 00:41:28.660484 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-fcgwl/must-gather-2gpjs"] Dec 08 00:41:28 crc kubenswrapper[4745]: I1208 00:41:28.661389 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-fcgwl/must-gather-2gpjs" podUID="04ea437b-ff46-4fe6-9b1e-9b1f7980027a" containerName="copy" containerID="cri-o://01eaaa36621a638b31ab89e4d4cf34aaf2814fa3685e1f394dedbae22cb4aca4" gracePeriod=2 Dec 08 00:41:28 crc kubenswrapper[4745]: I1208 00:41:28.669830 4745 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openshift-must-gather-fcgwl/must-gather-2gpjs"] Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.032405 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-fcgwl_must-gather-2gpjs_04ea437b-ff46-4fe6-9b1e-9b1f7980027a/copy/0.log" Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.033038 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-fcgwl/must-gather-2gpjs" Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.098803 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/04ea437b-ff46-4fe6-9b1e-9b1f7980027a-must-gather-output\") pod \"04ea437b-ff46-4fe6-9b1e-9b1f7980027a\" (UID: \"04ea437b-ff46-4fe6-9b1e-9b1f7980027a\") " Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.098868 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqqf9\" (UniqueName: \"kubernetes.io/projected/04ea437b-ff46-4fe6-9b1e-9b1f7980027a-kube-api-access-wqqf9\") pod \"04ea437b-ff46-4fe6-9b1e-9b1f7980027a\" (UID: \"04ea437b-ff46-4fe6-9b1e-9b1f7980027a\") " Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.104176 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04ea437b-ff46-4fe6-9b1e-9b1f7980027a-kube-api-access-wqqf9" (OuterVolumeSpecName: "kube-api-access-wqqf9") pod "04ea437b-ff46-4fe6-9b1e-9b1f7980027a" (UID: "04ea437b-ff46-4fe6-9b1e-9b1f7980027a"). InnerVolumeSpecName "kube-api-access-wqqf9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.144307 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04ea437b-ff46-4fe6-9b1e-9b1f7980027a-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "04ea437b-ff46-4fe6-9b1e-9b1f7980027a" (UID: "04ea437b-ff46-4fe6-9b1e-9b1f7980027a"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.200318 4745 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/04ea437b-ff46-4fe6-9b1e-9b1f7980027a-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.200370 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqqf9\" (UniqueName: \"kubernetes.io/projected/04ea437b-ff46-4fe6-9b1e-9b1f7980027a-kube-api-access-wqqf9\") on node \"crc\" DevicePath \"\"" Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.443287 4745 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-fcgwl_must-gather-2gpjs_04ea437b-ff46-4fe6-9b1e-9b1f7980027a/copy/0.log" Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.443801 4745 generic.go:334] "Generic (PLEG): container finished" podID="04ea437b-ff46-4fe6-9b1e-9b1f7980027a" containerID="01eaaa36621a638b31ab89e4d4cf34aaf2814fa3685e1f394dedbae22cb4aca4" exitCode=143 Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.443866 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-fcgwl/must-gather-2gpjs" Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.443870 4745 scope.go:117] "RemoveContainer" containerID="01eaaa36621a638b31ab89e4d4cf34aaf2814fa3685e1f394dedbae22cb4aca4" Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.468401 4745 scope.go:117] "RemoveContainer" containerID="de032e33a96b82e7473aa23ae3869da3f50c9984d6b1cee0b6c4f9bca8ae2223" Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.521064 4745 scope.go:117] "RemoveContainer" containerID="01eaaa36621a638b31ab89e4d4cf34aaf2814fa3685e1f394dedbae22cb4aca4" Dec 08 00:41:29 crc kubenswrapper[4745]: E1208 00:41:29.521564 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01eaaa36621a638b31ab89e4d4cf34aaf2814fa3685e1f394dedbae22cb4aca4\": container with ID starting with 01eaaa36621a638b31ab89e4d4cf34aaf2814fa3685e1f394dedbae22cb4aca4 not found: ID does not exist" containerID="01eaaa36621a638b31ab89e4d4cf34aaf2814fa3685e1f394dedbae22cb4aca4" Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.521618 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01eaaa36621a638b31ab89e4d4cf34aaf2814fa3685e1f394dedbae22cb4aca4"} err="failed to get container status \"01eaaa36621a638b31ab89e4d4cf34aaf2814fa3685e1f394dedbae22cb4aca4\": rpc error: code = NotFound desc = could not find container \"01eaaa36621a638b31ab89e4d4cf34aaf2814fa3685e1f394dedbae22cb4aca4\": container with ID starting with 01eaaa36621a638b31ab89e4d4cf34aaf2814fa3685e1f394dedbae22cb4aca4 not found: ID does not exist" Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.521649 4745 scope.go:117] "RemoveContainer" containerID="de032e33a96b82e7473aa23ae3869da3f50c9984d6b1cee0b6c4f9bca8ae2223" Dec 08 00:41:29 crc kubenswrapper[4745]: E1208 00:41:29.522048 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de032e33a96b82e7473aa23ae3869da3f50c9984d6b1cee0b6c4f9bca8ae2223\": container with ID starting with de032e33a96b82e7473aa23ae3869da3f50c9984d6b1cee0b6c4f9bca8ae2223 not found: ID does not exist" containerID="de032e33a96b82e7473aa23ae3869da3f50c9984d6b1cee0b6c4f9bca8ae2223" Dec 08 00:41:29 crc kubenswrapper[4745]: I1208 00:41:29.522097 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de032e33a96b82e7473aa23ae3869da3f50c9984d6b1cee0b6c4f9bca8ae2223"} err="failed to get container status \"de032e33a96b82e7473aa23ae3869da3f50c9984d6b1cee0b6c4f9bca8ae2223\": rpc error: code = NotFound desc = could not find container \"de032e33a96b82e7473aa23ae3869da3f50c9984d6b1cee0b6c4f9bca8ae2223\": container with ID starting with de032e33a96b82e7473aa23ae3869da3f50c9984d6b1cee0b6c4f9bca8ae2223 not found: ID does not exist" Dec 08 00:41:30 crc kubenswrapper[4745]: I1208 00:41:30.899518 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04ea437b-ff46-4fe6-9b1e-9b1f7980027a" path="/var/lib/kubelet/pods/04ea437b-ff46-4fe6-9b1e-9b1f7980027a/volumes" Dec 08 00:41:52 crc kubenswrapper[4745]: I1208 00:41:52.461149 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:41:52 crc kubenswrapper[4745]: 
I1208 00:41:52.461802 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:42:22 crc kubenswrapper[4745]: I1208 00:42:22.461008 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:42:22 crc kubenswrapper[4745]: I1208 00:42:22.461727 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 08 00:42:22 crc kubenswrapper[4745]: I1208 00:42:22.461786 4745 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" Dec 08 00:42:22 crc kubenswrapper[4745]: I1208 00:42:22.462406 4745 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"786043d809c1dfa004a98546906f84e535192b8c62f081b960cccf9d1884da81"} pod="openshift-machine-config-operator/machine-config-daemon-6czdv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 08 00:42:22 crc kubenswrapper[4745]: I1208 00:42:22.462492 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" containerID="cri-o://786043d809c1dfa004a98546906f84e535192b8c62f081b960cccf9d1884da81" gracePeriod=600 Dec 08 00:42:22 crc kubenswrapper[4745]: I1208 00:42:22.949534 4745 generic.go:334] "Generic (PLEG): container finished" podID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerID="786043d809c1dfa004a98546906f84e535192b8c62f081b960cccf9d1884da81" exitCode=0 Dec 08 00:42:22 crc kubenswrapper[4745]: I1208 00:42:22.949580 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerDied","Data":"786043d809c1dfa004a98546906f84e535192b8c62f081b960cccf9d1884da81"} Dec 08 00:42:22 crc kubenswrapper[4745]: I1208 00:42:22.949608 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" event={"ID":"44f083ce-ad64-45d5-971c-eca93c5bddd6","Type":"ContainerStarted","Data":"8f3f2c3160c993a8c21e93cbed5fddf081026863a205dcb6a71ba65733090d0a"} Dec 08 00:42:22 crc kubenswrapper[4745]: I1208 00:42:22.949623 4745 scope.go:117] "RemoveContainer" containerID="70b6ebaddcd7736b7c709fd66c2118644c1daf354684804fcde08e1da87066d0" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.404283 4745 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-tsbkr"] Dec 08 00:43:10 crc kubenswrapper[4745]: E1208 00:43:10.405380 4745 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="04ea437b-ff46-4fe6-9b1e-9b1f7980027a" containerName="gather" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.405397 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="04ea437b-ff46-4fe6-9b1e-9b1f7980027a" containerName="gather" Dec 08 00:43:10 crc kubenswrapper[4745]: E1208 00:43:10.405415 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f850cb8a-64ed-4ba4-83ec-d1b86125b1ad" containerName="registry-server" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.405424 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f850cb8a-64ed-4ba4-83ec-d1b86125b1ad" containerName="registry-server" Dec 08 00:43:10 crc kubenswrapper[4745]: E1208 00:43:10.405436 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f850cb8a-64ed-4ba4-83ec-d1b86125b1ad" containerName="extract-utilities" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.405445 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f850cb8a-64ed-4ba4-83ec-d1b86125b1ad" containerName="extract-utilities" Dec 08 00:43:10 crc kubenswrapper[4745]: E1208 00:43:10.405458 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04ea437b-ff46-4fe6-9b1e-9b1f7980027a" containerName="copy" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.405465 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="04ea437b-ff46-4fe6-9b1e-9b1f7980027a" containerName="copy" Dec 08 00:43:10 crc kubenswrapper[4745]: E1208 00:43:10.405476 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd9d0126-232f-449b-8ce5-375568eb73ec" containerName="extract-utilities" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.405485 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd9d0126-232f-449b-8ce5-375568eb73ec" containerName="extract-utilities" Dec 08 00:43:10 crc kubenswrapper[4745]: E1208 00:43:10.405499 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd9d0126-232f-449b-8ce5-375568eb73ec" containerName="registry-server" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.405509 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd9d0126-232f-449b-8ce5-375568eb73ec" containerName="registry-server" Dec 08 00:43:10 crc kubenswrapper[4745]: E1208 00:43:10.405524 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f850cb8a-64ed-4ba4-83ec-d1b86125b1ad" containerName="extract-content" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.405531 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="f850cb8a-64ed-4ba4-83ec-d1b86125b1ad" containerName="extract-content" Dec 08 00:43:10 crc kubenswrapper[4745]: E1208 00:43:10.405542 4745 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd9d0126-232f-449b-8ce5-375568eb73ec" containerName="extract-content" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.405550 4745 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd9d0126-232f-449b-8ce5-375568eb73ec" containerName="extract-content" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.405733 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="04ea437b-ff46-4fe6-9b1e-9b1f7980027a" containerName="gather" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.405745 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd9d0126-232f-449b-8ce5-375568eb73ec" containerName="registry-server" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.407829 4745 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f850cb8a-64ed-4ba4-83ec-d1b86125b1ad" containerName="registry-server" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.407861 4745 memory_manager.go:354] "RemoveStaleState removing state" podUID="04ea437b-ff46-4fe6-9b1e-9b1f7980027a" containerName="copy" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.408468 4745 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-tsbkr" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.412359 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-tsbkr"] Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.543030 4745 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w46sr\" (UniqueName: \"kubernetes.io/projected/ecec6005-5329-463d-b8ba-2df02caf84b9-kube-api-access-w46sr\") pod \"infrawatch-operators-tsbkr\" (UID: \"ecec6005-5329-463d-b8ba-2df02caf84b9\") " pod="service-telemetry/infrawatch-operators-tsbkr" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.644339 4745 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w46sr\" (UniqueName: \"kubernetes.io/projected/ecec6005-5329-463d-b8ba-2df02caf84b9-kube-api-access-w46sr\") pod \"infrawatch-operators-tsbkr\" (UID: \"ecec6005-5329-463d-b8ba-2df02caf84b9\") " pod="service-telemetry/infrawatch-operators-tsbkr" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.672626 4745 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w46sr\" (UniqueName: \"kubernetes.io/projected/ecec6005-5329-463d-b8ba-2df02caf84b9-kube-api-access-w46sr\") pod \"infrawatch-operators-tsbkr\" (UID: \"ecec6005-5329-463d-b8ba-2df02caf84b9\") " pod="service-telemetry/infrawatch-operators-tsbkr" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.739485 4745 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-tsbkr" Dec 08 00:43:10 crc kubenswrapper[4745]: I1208 00:43:10.965493 4745 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-tsbkr"] Dec 08 00:43:11 crc kubenswrapper[4745]: I1208 00:43:11.452605 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-tsbkr" event={"ID":"ecec6005-5329-463d-b8ba-2df02caf84b9","Type":"ContainerStarted","Data":"18cefa024778131ed74477fb1b0f7a865cf098f1805f6f6bfdc857af377045da"} Dec 08 00:43:11 crc kubenswrapper[4745]: I1208 00:43:11.453131 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-tsbkr" event={"ID":"ecec6005-5329-463d-b8ba-2df02caf84b9","Type":"ContainerStarted","Data":"52a813f442f6230e3f4f564fc335222f089cb5547686d836dba3e59a6109be6a"} Dec 08 00:43:11 crc kubenswrapper[4745]: I1208 00:43:11.487754 4745 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-tsbkr" podStartSLOduration=1.370021221 podStartE2EDuration="1.487713711s" podCreationTimestamp="2025-12-08 00:43:10 +0000 UTC" firstStartedPulling="2025-12-08 00:43:10.977835359 +0000 UTC m=+2146.407041659" lastFinishedPulling="2025-12-08 00:43:11.095527849 +0000 UTC m=+2146.524734149" observedRunningTime="2025-12-08 00:43:11.480477834 +0000 UTC m=+2146.909684164" watchObservedRunningTime="2025-12-08 00:43:11.487713711 +0000 UTC m=+2146.916920031" Dec 08 00:43:20 crc kubenswrapper[4745]: I1208 00:43:20.740160 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/infrawatch-operators-tsbkr" Dec 08 00:43:20 crc kubenswrapper[4745]: I1208 00:43:20.740999 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/infrawatch-operators-tsbkr" Dec 08 00:43:20 crc kubenswrapper[4745]: I1208 00:43:20.795413 4745 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/infrawatch-operators-tsbkr" Dec 08 00:43:21 crc kubenswrapper[4745]: I1208 00:43:21.603292 4745 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/infrawatch-operators-tsbkr" Dec 08 00:43:21 crc kubenswrapper[4745]: I1208 00:43:21.663542 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-tsbkr"] Dec 08 00:43:23 crc kubenswrapper[4745]: I1208 00:43:23.570901 4745 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/infrawatch-operators-tsbkr" podUID="ecec6005-5329-463d-b8ba-2df02caf84b9" containerName="registry-server" containerID="cri-o://18cefa024778131ed74477fb1b0f7a865cf098f1805f6f6bfdc857af377045da" gracePeriod=2 Dec 08 00:43:24 crc kubenswrapper[4745]: I1208 00:43:24.043677 4745 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-tsbkr" Dec 08 00:43:24 crc kubenswrapper[4745]: I1208 00:43:24.053675 4745 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w46sr\" (UniqueName: \"kubernetes.io/projected/ecec6005-5329-463d-b8ba-2df02caf84b9-kube-api-access-w46sr\") pod \"ecec6005-5329-463d-b8ba-2df02caf84b9\" (UID: \"ecec6005-5329-463d-b8ba-2df02caf84b9\") " Dec 08 00:43:24 crc kubenswrapper[4745]: I1208 00:43:24.072868 4745 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecec6005-5329-463d-b8ba-2df02caf84b9-kube-api-access-w46sr" (OuterVolumeSpecName: "kube-api-access-w46sr") pod "ecec6005-5329-463d-b8ba-2df02caf84b9" (UID: "ecec6005-5329-463d-b8ba-2df02caf84b9"). InnerVolumeSpecName "kube-api-access-w46sr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 08 00:43:24 crc kubenswrapper[4745]: I1208 00:43:24.155159 4745 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w46sr\" (UniqueName: \"kubernetes.io/projected/ecec6005-5329-463d-b8ba-2df02caf84b9-kube-api-access-w46sr\") on node \"crc\" DevicePath \"\"" Dec 08 00:43:24 crc kubenswrapper[4745]: I1208 00:43:24.583298 4745 generic.go:334] "Generic (PLEG): container finished" podID="ecec6005-5329-463d-b8ba-2df02caf84b9" containerID="18cefa024778131ed74477fb1b0f7a865cf098f1805f6f6bfdc857af377045da" exitCode=0 Dec 08 00:43:24 crc kubenswrapper[4745]: I1208 00:43:24.583357 4745 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-tsbkr" Dec 08 00:43:24 crc kubenswrapper[4745]: I1208 00:43:24.583382 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-tsbkr" event={"ID":"ecec6005-5329-463d-b8ba-2df02caf84b9","Type":"ContainerDied","Data":"18cefa024778131ed74477fb1b0f7a865cf098f1805f6f6bfdc857af377045da"} Dec 08 00:43:24 crc kubenswrapper[4745]: I1208 00:43:24.583468 4745 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-tsbkr" event={"ID":"ecec6005-5329-463d-b8ba-2df02caf84b9","Type":"ContainerDied","Data":"52a813f442f6230e3f4f564fc335222f089cb5547686d836dba3e59a6109be6a"} Dec 08 00:43:24 crc kubenswrapper[4745]: I1208 00:43:24.583521 4745 scope.go:117] "RemoveContainer" containerID="18cefa024778131ed74477fb1b0f7a865cf098f1805f6f6bfdc857af377045da" Dec 08 00:43:24 crc kubenswrapper[4745]: I1208 00:43:24.611417 4745 scope.go:117] "RemoveContainer" containerID="18cefa024778131ed74477fb1b0f7a865cf098f1805f6f6bfdc857af377045da" Dec 08 00:43:24 crc kubenswrapper[4745]: E1208 00:43:24.612153 4745 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18cefa024778131ed74477fb1b0f7a865cf098f1805f6f6bfdc857af377045da\": container with ID starting with 18cefa024778131ed74477fb1b0f7a865cf098f1805f6f6bfdc857af377045da not found: ID does not exist" containerID="18cefa024778131ed74477fb1b0f7a865cf098f1805f6f6bfdc857af377045da" Dec 08 00:43:24 crc kubenswrapper[4745]: I1208 00:43:24.612212 4745 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18cefa024778131ed74477fb1b0f7a865cf098f1805f6f6bfdc857af377045da"} err="failed to get container status \"18cefa024778131ed74477fb1b0f7a865cf098f1805f6f6bfdc857af377045da\": rpc error: code = NotFound desc = could not find container 
\"18cefa024778131ed74477fb1b0f7a865cf098f1805f6f6bfdc857af377045da\": container with ID starting with 18cefa024778131ed74477fb1b0f7a865cf098f1805f6f6bfdc857af377045da not found: ID does not exist" Dec 08 00:43:24 crc kubenswrapper[4745]: I1208 00:43:24.645141 4745 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-tsbkr"] Dec 08 00:43:24 crc kubenswrapper[4745]: I1208 00:43:24.682204 4745 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/infrawatch-operators-tsbkr"] Dec 08 00:43:24 crc kubenswrapper[4745]: I1208 00:43:24.899574 4745 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecec6005-5329-463d-b8ba-2df02caf84b9" path="/var/lib/kubelet/pods/ecec6005-5329-463d-b8ba-2df02caf84b9/volumes" Dec 08 00:44:22 crc kubenswrapper[4745]: I1208 00:44:22.460610 4745 patch_prober.go:28] interesting pod/machine-config-daemon-6czdv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 08 00:44:22 crc kubenswrapper[4745]: I1208 00:44:22.461415 4745 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6czdv" podUID="44f083ce-ad64-45d5-971c-eca93c5bddd6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515115417576024461 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015115417576017376 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015115412674016513 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015115412674015463 5ustar corecore